OLD | NEW |
---|---|
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/object.h" | 5 #include "vm/object.h" |
6 | 6 |
7 #include "include/dart_api.h" | 7 #include "include/dart_api.h" |
8 #include "platform/assert.h" | 8 #include "platform/assert.h" |
9 #include "vm/assembler.h" | 9 #include "vm/assembler.h" |
10 #include "vm/cpu.h" | 10 #include "vm/cpu.h" |
(...skipping 645 matching lines...)
656 cls = Class::New<Array>(); | 656 cls = Class::New<Array>(); |
657 isolate->object_store()->set_array_class(cls); | 657 isolate->object_store()->set_array_class(cls); |
658 cls.set_type_arguments_field_offset(Array::type_arguments_offset()); | 658 cls.set_type_arguments_field_offset(Array::type_arguments_offset()); |
659 cls.set_num_type_arguments(1); | 659 cls.set_num_type_arguments(1); |
660 cls.set_num_own_type_arguments(1); | 660 cls.set_num_own_type_arguments(1); |
661 cls = Class::New<Array>(kImmutableArrayCid); | 661 cls = Class::New<Array>(kImmutableArrayCid); |
662 isolate->object_store()->set_immutable_array_class(cls); | 662 isolate->object_store()->set_immutable_array_class(cls); |
663 cls.set_type_arguments_field_offset(Array::type_arguments_offset()); | 663 cls.set_type_arguments_field_offset(Array::type_arguments_offset()); |
664 cls.set_num_type_arguments(1); | 664 cls.set_num_type_arguments(1); |
665 cls.set_num_own_type_arguments(1); | 665 cls.set_num_own_type_arguments(1); |
666 cls = Class::New<GrowableObjectArray>(); | |
667 isolate->object_store()->set_growable_object_array_class(cls); | |
668 cls.set_type_arguments_field_offset( | |
669 GrowableObjectArray::type_arguments_offset()); | |
670 cls.set_num_type_arguments(1); | |
666 cls = Class::NewStringClass(kOneByteStringCid); | 671 cls = Class::NewStringClass(kOneByteStringCid); |
667 isolate->object_store()->set_one_byte_string_class(cls); | 672 isolate->object_store()->set_one_byte_string_class(cls); |
668 cls = Class::NewStringClass(kTwoByteStringCid); | 673 cls = Class::NewStringClass(kTwoByteStringCid); |
669 isolate->object_store()->set_two_byte_string_class(cls); | 674 isolate->object_store()->set_two_byte_string_class(cls); |
670 cls = Class::New<Mint>(); | 675 cls = Class::New<Mint>(); |
671 isolate->object_store()->set_mint_class(cls); | 676 isolate->object_store()->set_mint_class(cls); |
672 cls = Class::New<Bigint>(); | 677 cls = Class::New<Bigint>(); |
673 isolate->object_store()->set_bigint_class(cls); | 678 isolate->object_store()->set_bigint_class(cls); |
674 cls = Class::New<Double>(); | 679 cls = Class::New<Double>(); |
675 isolate->object_store()->set_double_class(cls); | 680 isolate->object_store()->set_double_class(cls); |
(...skipping 2397 matching lines...)
3073 String::Handle(BuildClosureSource(param_names, expr)); | 3078 String::Handle(BuildClosureSource(param_names, expr)); |
3074 Script& script = Script::Handle(); | 3079 Script& script = Script::Handle(); |
3075 script = Script::New(Symbols::EvalSourceUri(), | 3080 script = Script::New(Symbols::EvalSourceUri(), |
3076 func_src, | 3081 func_src, |
3077 RawScript::kEvaluateTag); | 3082 RawScript::kEvaluateTag); |
3078 // In order to tokenize the source, we need to get the key to mangle | 3083 // In order to tokenize the source, we need to get the key to mangle |
3079 // private names from the library from which the class originates. | 3084 // private names from the library from which the class originates. |
3080 const Library& lib = Library::Handle(cls.library()); | 3085 const Library& lib = Library::Handle(cls.library()); |
3081 ASSERT(!lib.IsNull()); | 3086 ASSERT(!lib.IsNull()); |
3082 const String& lib_key = String::Handle(lib.private_key()); | 3087 const String& lib_key = String::Handle(lib.private_key()); |
3083 script.Tokenize(lib_key); | 3088 script.Tokenize(lib_key, false); |
3084 | 3089 |
3085 const Function& func = Function::Handle( | 3090 const Function& func = Function::Handle( |
3086 Function::NewEvalFunction(cls, script, is_static)); | 3091 Function::NewEvalFunction(cls, script, is_static)); |
3087 func.set_result_type(Object::dynamic_type()); | 3092 func.set_result_type(Object::dynamic_type()); |
3088 const intptr_t num_implicit_params = is_static ? 0 : 1; | 3093 const intptr_t num_implicit_params = is_static ? 0 : 1; |
3089 func.set_num_fixed_parameters(num_implicit_params + param_names.Length()); | 3094 func.set_num_fixed_parameters(num_implicit_params + param_names.Length()); |
3090 func.SetNumOptionalParameters(0, true); | 3095 func.SetNumOptionalParameters(0, true); |
3091 func.SetIsOptimizable(false); | 3096 func.SetIsOptimizable(false); |
3092 return func.raw(); | 3097 return func.raw(); |
3093 } | 3098 } |
(...skipping 4954 matching lines...)
8048 const String& token = String::Handle(literal()); | 8053 const String& token = String::Handle(literal()); |
8049 return token.ToCString(); | 8054 return token.ToCString(); |
8050 } | 8055 } |
8051 | 8056 |
8052 | 8057 |
8053 void LiteralToken::PrintJSONImpl(JSONStream* stream, bool ref) const { | 8058 void LiteralToken::PrintJSONImpl(JSONStream* stream, bool ref) const { |
8054 Object::PrintJSONImpl(stream, ref); | 8059 Object::PrintJSONImpl(stream, ref); |
8055 } | 8060 } |
8056 | 8061 |
8057 | 8062 |
8058 RawArray* TokenStream::TokenObjects() const { | 8063 RawGrowableObjectArray* TokenStream::TokenObjects() const { |
8059 return raw_ptr()->token_objects_; | 8064 return raw_ptr()->token_objects_; |
8060 } | 8065 } |
8061 | 8066 |
8062 | 8067 |
8063 void TokenStream::SetTokenObjects(const Array& value) const { | 8068 void TokenStream::SetTokenObjects(const GrowableObjectArray& value) const { |
8064 StorePointer(&raw_ptr()->token_objects_, value.raw()); | 8069 StorePointer(&raw_ptr()->token_objects_, value.raw()); |
8065 } | 8070 } |
8066 | 8071 |
8067 | 8072 |
8068 RawExternalTypedData* TokenStream::GetStream() const { | 8073 RawExternalTypedData* TokenStream::GetStream() const { |
8069 return raw_ptr()->stream_; | 8074 return raw_ptr()->stream_; |
8070 } | 8075 } |
8071 | 8076 |
8072 | 8077 |
8073 void TokenStream::SetStream(const ExternalTypedData& value) const { | 8078 void TokenStream::SetStream(const ExternalTypedData& value) const { |
(...skipping 261 matching lines...)
8335 return descriptor.literal->Hash(); | 8340 return descriptor.literal->Hash(); |
8336 } | 8341 } |
8337 | 8342 |
8338 static uword Hash(const Object& key) { | 8343 static uword Hash(const Object& key) { |
8339 if (key.IsLiteralToken()) { | 8344 if (key.IsLiteralToken()) { |
8340 return String::HashRawSymbol(LiteralToken::Cast(key).literal()); | 8345 return String::HashRawSymbol(LiteralToken::Cast(key).literal()); |
8341 } else { | 8346 } else { |
8342 return String::Cast(key).Hash(); | 8347 return String::Cast(key).Hash(); |
8343 } | 8348 } |
8344 } | 8349 } |
8345 | |
8346 static RawObject* NewKey(const Scanner::TokenDescriptor& descriptor) { | |
8347 return LiteralToken::New(descriptor.kind, *descriptor.literal); | |
8348 } | |
8349 }; | 8350 }; |
8350 typedef UnorderedHashMap<CompressedTokenTraits> CompressedTokenMap; | 8351 typedef UnorderedHashMap<CompressedTokenTraits> CompressedTokenMap; |
8351 | 8352 |
8352 | 8353 |
8353 // Helper class for creation of compressed token stream data. | 8354 // Helper class for creation of compressed token stream data. |
8354 class CompressedTokenStreamData : public ValueObject { | 8355 class CompressedTokenStreamData : public ValueObject { |
8355 public: | 8356 public: |
8356 static const intptr_t kInitialBufferSize = 16 * KB; | 8357 static const intptr_t kInitialBufferSize = 16 * KB; |
8357 CompressedTokenStreamData() : | 8358 static const bool kPrintTokenObjects = false; |
siva 2015/10/28 18:30:22: Instead of a compile time option to toggle this tr…
hausner 2015/10/28 21:28:46: I expect this to go away soon, so I didn't want to…
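For context, siva's suggestion above would amount to a VM runtime flag instead of the compile-time constant. A minimal sketch of that shape, assuming the VM's standard DEFINE_FLAG machinery and a hypothetical flag name that is not part of this CL:

```cpp
// Hypothetical flag, not in this CL; DEFINE_FLAG/FLAG_* come from vm/flags.h.
DEFINE_FLAG(bool, print_token_objects, false,
            "Trace ident/literal tokens added to compressed token streams.");

// The compile-time guard in AddIdentToken/AddLiteralToken would then become
// a runtime check, for example:
if (FLAG_print_token_objects) {
  int iid = Isolate::Current()->main_port() % 1024;
  OS::Print("ident %03x %p <%s>\n", iid, ident.raw(), ident.ToCString());
}
```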
8359 | |
8360 CompressedTokenStreamData(const GrowableObjectArray& ta, | |
8361 CompressedTokenMap* map) : | |
8358 buffer_(NULL), | 8362 buffer_(NULL), |
8359 stream_(&buffer_, Reallocate, kInitialBufferSize), | 8363 stream_(&buffer_, Reallocate, kInitialBufferSize), |
8360 tokens_(HashTables::New<CompressedTokenMap>(kInitialTableSize)) { | 8364 token_objects_(ta), |
8361 } | 8365 tokens_(map) { |
8362 ~CompressedTokenStreamData() { | |
8363 // Safe to discard the hash table now. | |
8364 tokens_.Release(); | |
8365 } | 8366 } |
8366 | 8367 |
8367 // Add an IDENT token into the stream and the token hash map. | 8368 // Add an IDENT token into the stream and the token hash map. |
8368 void AddIdentToken(const String* ident) { | 8369 void AddIdentToken(const String& ident) { |
8369 ASSERT(ident->IsSymbol()); | 8370 ASSERT(ident.IsSymbol()); |
8370 const intptr_t fresh_index = tokens_.NumOccupied(); | 8371 const intptr_t fresh_index = token_objects_.Length(); |
8371 intptr_t index = Smi::Value(Smi::RawCast( | 8372 intptr_t index = Smi::Value(Smi::RawCast( |
8372 tokens_.InsertOrGetValue(*ident, | 8373 tokens_->InsertOrGetValue(ident, |
8373 Smi::Handle(Smi::New(fresh_index))))); | 8374 Smi::Handle(Smi::New(fresh_index))))); |
8375 if (index == fresh_index) { | |
8376 token_objects_.Add(ident); | |
8377 if (kPrintTokenObjects) { | |
8378 int iid = Isolate::Current()->main_port() % 1024; | |
8379 OS::Print("ident %03x %p <%s>\n", | |
8380 iid, ident.raw(), ident.ToCString()); | |
8381 } | |
8382 } | |
8374 WriteIndex(index); | 8383 WriteIndex(index); |
8375 } | 8384 } |
8376 | 8385 |
8377 // Add a LITERAL token into the stream and the token hash map. | 8386 // Add a LITERAL token into the stream and the token hash map. |
8378 void AddLiteralToken(const Scanner::TokenDescriptor& descriptor) { | 8387 void AddLiteralToken(const Scanner::TokenDescriptor& descriptor) { |
8379 ASSERT(descriptor.literal->IsSymbol()); | 8388 ASSERT(descriptor.literal->IsSymbol()); |
8380 const intptr_t fresh_index = tokens_.NumOccupied(); | 8389 bool is_present = false; |
8381 intptr_t index = Smi::Value(Smi::RawCast( | 8390 const Object& index_value = |
8382 tokens_.InsertNewOrGetValue(descriptor, | 8391 Object::Handle(tokens_->GetOrNull(descriptor, &is_present)); |
siva 2015/10/28 18:30:22: You could create these handles (Object, LiteralTok…
hausner 2015/10/28 21:28:46: Done.
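The handle-hoisting siva asks for follows the usual VM pattern: allocate the handles once and reassign them per token rather than constructing fresh ones inside AddLiteralToken. A rough sketch under that assumption (member names are illustrative, not the CL's final code):

```cpp
// Sketch only: handles created once in the constructor, reused per token.
class CompressedTokenStreamData : public ValueObject {
 public:
  CompressedTokenStreamData(const GrowableObjectArray& ta,
                            CompressedTokenMap* map)
      : token_objects_(ta),
        tokens_(map),
        value_(Object::Handle()),               // reused lookup result
        fresh_literal_(LiteralToken::Handle())  // reused new-literal handle
  {}

  void AddLiteralToken(const Scanner::TokenDescriptor& descriptor) {
    bool is_present = false;
    value_ = tokens_->GetOrNull(descriptor, &is_present);
    if (!is_present) {
      fresh_literal_ = LiteralToken::New(descriptor.kind, *descriptor.literal);
      // ... insert fresh_literal_ into tokens_ and token_objects_ as before ...
    }
    // ... write the index as before ...
  }

 private:
  const GrowableObjectArray& token_objects_;
  CompressedTokenMap* tokens_;
  Object& value_;
  LiteralToken& fresh_literal_;
};
```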
8383 Smi::Handle(Smi::New(fresh_index))))); | 8392 intptr_t index = -1; |
8393 if (is_present) { | |
8394 ASSERT(index_value.IsSmi()); | |
8395 index = Smi::Cast(index_value).Value(); | |
8396 } else { | |
8397 const intptr_t fresh_index = token_objects_.Length(); | |
8398 const LiteralToken& lit = LiteralToken::Handle( | |
8399 LiteralToken::New(descriptor.kind, *descriptor.literal)); | |
8400 index = Smi::Value(Smi::RawCast( | |
8401 tokens_->InsertOrGetValue(lit, Smi::Handle(Smi::New(fresh_index))))); | |
8402 token_objects_.Add(lit); | |
8403 if (kPrintTokenObjects) { | |
8404 int iid = Isolate::Current()->main_port() % 1024; | |
8405 printf("lit %03x %p %p %p <%s>\n", | |
8406 iid, token_objects_.raw(), lit.literal(), lit.value(), | |
8407 String::Handle(lit.literal()).ToCString()); | |
8408 } | |
8409 } | |
8384 WriteIndex(index); | 8410 WriteIndex(index); |
8385 } | 8411 } |
8386 | 8412 |
8387 // Add a simple token into the stream. | 8413 // Add a simple token into the stream. |
8388 void AddSimpleToken(intptr_t kind) { | 8414 void AddSimpleToken(intptr_t kind) { |
8389 stream_.WriteUnsigned(kind); | 8415 stream_.WriteUnsigned(kind); |
8390 } | 8416 } |
8391 | 8417 |
8392 // Return the compressed token stream. | 8418 // Return the compressed token stream. |
8393 uint8_t* GetStream() const { return buffer_; } | 8419 uint8_t* GetStream() const { return buffer_; } |
8394 | 8420 |
8395 // Return the compressed token stream length. | 8421 // Return the compressed token stream length. |
8396 intptr_t Length() const { return stream_.bytes_written(); } | 8422 intptr_t Length() const { return stream_.bytes_written(); } |
8397 | 8423 |
8398 // Generate and return the token objects array. | |
8399 RawArray* MakeTokenObjectsArray() const { | |
8400 Array& result = Array::Handle( | |
8401 Array::New(tokens_.NumOccupied(), Heap::kOld)); | |
8402 CompressedTokenMap::Iterator it(&tokens_); | |
8403 Object& key = Object::Handle(); | |
8404 while (it.MoveNext()) { | |
8405 intptr_t entry = it.Current(); | |
8406 key = tokens_.GetKey(entry); | |
8407 result.SetAt(Smi::Value(Smi::RawCast(tokens_.GetPayload(entry, 0))), key); | |
8408 } | |
8409 return result.raw(); | |
8410 } | |
8411 | |
8412 private: | 8424 private: |
8413 void WriteIndex(intptr_t value) { | 8425 void WriteIndex(intptr_t value) { |
8414 stream_.WriteUnsigned(value + Token::kNumTokens); | 8426 stream_.WriteUnsigned(value + Token::kNumTokens); |
8415 } | 8427 } |
8416 | 8428 |
8417 static uint8_t* Reallocate(uint8_t* ptr, | 8429 static uint8_t* Reallocate(uint8_t* ptr, |
8418 intptr_t old_size, | 8430 intptr_t old_size, |
8419 intptr_t new_size) { | 8431 intptr_t new_size) { |
8420 void* new_ptr = ::realloc(reinterpret_cast<void*>(ptr), new_size); | 8432 void* new_ptr = ::realloc(reinterpret_cast<void*>(ptr), new_size); |
8421 return reinterpret_cast<uint8_t*>(new_ptr); | 8433 return reinterpret_cast<uint8_t*>(new_ptr); |
8422 } | 8434 } |
8423 | 8435 |
8424 static const intptr_t kInitialTableSize = 32; | |
8425 | |
8426 uint8_t* buffer_; | 8436 uint8_t* buffer_; |
8427 WriteStream stream_; | 8437 WriteStream stream_; |
8428 CompressedTokenMap tokens_; | 8438 const GrowableObjectArray& token_objects_; |
8439 CompressedTokenMap* tokens_; | |
8429 | 8440 |
8430 DISALLOW_COPY_AND_ASSIGN(CompressedTokenStreamData); | 8441 DISALLOW_COPY_AND_ASSIGN(CompressedTokenStreamData); |
8431 }; | 8442 }; |
8432 | 8443 |
8433 | 8444 |
8434 RawTokenStream* TokenStream::New(const Scanner::GrowableTokenStream& tokens, | 8445 RawTokenStream* TokenStream::New(const Scanner::GrowableTokenStream& tokens, |
8435 const String& private_key) { | 8446 const String& private_key, |
8436 Zone* zone = Thread::Current()->zone(); | 8447 bool use_shared_tokens) { |
8448 Thread* thread = Thread::Current(); | |
8449 Zone* zone = thread->zone(); | |
8437 // Copy the relevant data out of the scanner into a compressed stream of | 8450 // Copy the relevant data out of the scanner into a compressed stream of |
8438 // tokens. | 8451 // tokens. |
8439 CompressedTokenStreamData data; | 8452 |
8453 GrowableObjectArray& token_objects = GrowableObjectArray::Handle(zone); | |
8454 Array& token_objects_map = Array::Handle(zone); | |
8455 if (use_shared_tokens) { | |
8456 // Use the shared token objects array in the object store. Allocate | |
8457 // a new array if necessary. | |
8458 ObjectStore* store = thread->isolate()->object_store(); | |
8459 if (store->token_objects() == GrowableObjectArray::null()) { | |
8460 OpenSharedTokenList(thread->isolate()); | |
8461 } | |
8462 token_objects = store->token_objects(); | |
8463 token_objects_map = store->token_objects_map(); | |
8464 } else { | |
8465 // Use new, non-shared token array. | |
8466 const int kInitialPrivateCapacity = 256; | |
8467 token_objects = | |
8468 GrowableObjectArray::New(kInitialPrivateCapacity, Heap::kOld); | |
8469 token_objects_map = | |
8470 HashTables::New<CompressedTokenMap>(kInitialPrivateCapacity, | |
8471 Heap::kOld); | |
8472 } | |
8473 CompressedTokenMap map(token_objects_map.raw()); | |
8474 CompressedTokenStreamData data(token_objects, &map); | |
8475 | |
8440 intptr_t len = tokens.length(); | 8476 intptr_t len = tokens.length(); |
8441 for (intptr_t i = 0; i < len; i++) { | 8477 for (intptr_t i = 0; i < len; i++) { |
8442 Scanner::TokenDescriptor token = tokens[i]; | 8478 Scanner::TokenDescriptor token = tokens[i]; |
8443 if (token.kind == Token::kIDENT) { // Identifier token. | 8479 if (token.kind == Token::kIDENT) { // Identifier token. |
8444 data.AddIdentToken(token.literal); | 8480 data.AddIdentToken(*token.literal); |
8445 } else if (Token::NeedsLiteralToken(token.kind)) { // Literal token. | 8481 } else if (Token::NeedsLiteralToken(token.kind)) { // Literal token. |
8446 data.AddLiteralToken(token); | 8482 data.AddLiteralToken(token); |
8447 } else { // Keyword, pseudo keyword etc. | 8483 } else { // Keyword, pseudo keyword etc. |
8448 ASSERT(token.kind < Token::kNumTokens); | 8484 ASSERT(token.kind < Token::kNumTokens); |
8449 data.AddSimpleToken(token.kind); | 8485 data.AddSimpleToken(token.kind); |
8450 } | 8486 } |
8451 } | 8487 } |
8452 data.AddSimpleToken(Token::kEOS); // End of stream. | 8488 data.AddSimpleToken(Token::kEOS); // End of stream. |
8453 | 8489 |
8454 // Create and setup the token stream object. | 8490 // Create and setup the token stream object. |
8455 const ExternalTypedData& stream = ExternalTypedData::Handle( | 8491 const ExternalTypedData& stream = ExternalTypedData::Handle( |
8456 zone, | 8492 zone, |
8457 ExternalTypedData::New(kExternalTypedDataUint8ArrayCid, | 8493 ExternalTypedData::New(kExternalTypedDataUint8ArrayCid, |
8458 data.GetStream(), data.Length(), Heap::kOld)); | 8494 data.GetStream(), data.Length(), Heap::kOld)); |
8459 stream.AddFinalizer(data.GetStream(), DataFinalizer); | 8495 stream.AddFinalizer(data.GetStream(), DataFinalizer); |
8460 const TokenStream& result = TokenStream::Handle(zone, New()); | 8496 const TokenStream& result = TokenStream::Handle(zone, New()); |
8461 result.SetPrivateKey(private_key); | 8497 result.SetPrivateKey(private_key); |
8462 const Array& token_objects = | |
8463 Array::Handle(zone, data.MakeTokenObjectsArray()); | |
8464 { | 8498 { |
8465 NoSafepointScope no_safepoint; | 8499 NoSafepointScope no_safepoint; |
8466 result.SetStream(stream); | 8500 result.SetStream(stream); |
8467 result.SetTokenObjects(token_objects); | 8501 result.SetTokenObjects(token_objects); |
8468 } | 8502 } |
8503 | |
8504 token_objects_map = map.Release().raw(); | |
8505 if (use_shared_tokens) { | |
8506 thread->isolate()->object_store()->set_token_objects_map(token_objects_map); | |
8507 } | |
8469 return result.raw(); | 8508 return result.raw(); |
8470 } | 8509 } |
8471 | 8510 |
8472 | 8511 |
8512 void TokenStream::OpenSharedTokenList(Isolate* isolate) { | |
8513 const int kInitialSharedCapacity = 5*1024; | |
8514 ObjectStore* store = isolate->object_store(); | |
8515 ASSERT(store->token_objects() == GrowableObjectArray::null()); | |
8516 const GrowableObjectArray& token_objects = GrowableObjectArray::Handle( | |
8517 GrowableObjectArray::New(kInitialSharedCapacity, Heap::kOld)); | |
8518 store->set_token_objects(token_objects); | |
8519 const Array& token_objects_map = Array::Handle( | |
8520 HashTables::New<CompressedTokenMap>(kInitialSharedCapacity, | |
8521 Heap::kOld)); | |
8522 store->set_token_objects_map(token_objects_map); | |
8523 } | |
8524 | |
8525 | |
8526 void TokenStream::CloseSharedTokenList(Isolate* isolate) { | |
8527 isolate->object_store()->set_token_objects(GrowableObjectArray::Handle()); | |
8528 isolate->object_store()->set_token_objects_map(Array::Handle()); | |
siva 2015/10/28 18:30:22: set_token_objects_map(null_array()); Not sure if…
hausner 2015/10/28 21:28:46: We did discuss it. I'll add this to the TokenItera…
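siva's partial comment points at the canonical null objects the VM interns (Object::null_array()); the reset he suggests would look roughly like this. A sketch of the suggestion only, not the code this CL landed with:

```cpp
// Sketch: reset the shared map to the canonical null array instead of a
// default-constructed Array handle.
void TokenStream::CloseSharedTokenList(Isolate* isolate) {
  ObjectStore* store = isolate->object_store();
  store->set_token_objects(GrowableObjectArray::Handle());
  store->set_token_objects_map(Object::null_array());
}
```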
8529 } | |
8530 | |
8531 | |
8473 const char* TokenStream::ToCString() const { | 8532 const char* TokenStream::ToCString() const { |
8474 return "TokenStream"; | 8533 return "TokenStream"; |
8475 } | 8534 } |
8476 | 8535 |
8477 | 8536 |
8478 void TokenStream::PrintJSONImpl(JSONStream* stream, bool ref) const { | 8537 void TokenStream::PrintJSONImpl(JSONStream* stream, bool ref) const { |
8479 JSONObject jsobj(stream); | 8538 JSONObject jsobj(stream); |
8480 AddCommonObjectProperties(&jsobj, "Object", ref); | 8539 AddCommonObjectProperties(&jsobj, "Object", ref); |
8481 // TODO(johnmccutchan): Generate a stable id. TokenStreams hang off | 8540 // TODO(johnmccutchan): Generate a stable id. TokenStreams hang off |
8482 // a Script object but do not have a back reference to generate a stable id. | 8541 // a Script object but do not have a back reference to generate a stable id. |
8483 jsobj.AddServiceId(*this); | 8542 jsobj.AddServiceId(*this); |
8484 if (ref) { | 8543 if (ref) { |
8485 return; | 8544 return; |
8486 } | 8545 } |
8487 const String& private_key = String::Handle(PrivateKey()); | 8546 const String& private_key = String::Handle(PrivateKey()); |
8488 jsobj.AddProperty("privateKey", private_key); | 8547 jsobj.AddProperty("privateKey", private_key); |
8489 // TODO(johnmccutchan): Add support for printing LiteralTokens and add | 8548 // TODO(johnmccutchan): Add support for printing LiteralTokens and add |
8490 // them to members array. | 8549 // them to members array. |
8491 JSONArray members(&jsobj, "members"); | 8550 JSONArray members(&jsobj, "members"); |
8492 } | 8551 } |
8493 | 8552 |
8494 | 8553 |
8495 TokenStream::Iterator::Iterator(const TokenStream& tokens, | 8554 TokenStream::Iterator::Iterator(const TokenStream& tokens, |
8496 intptr_t token_pos, | 8555 intptr_t token_pos, |
8497 Iterator::StreamType stream_type) | 8556 Iterator::StreamType stream_type) |
8498 : tokens_(TokenStream::Handle(tokens.raw())), | 8557 : tokens_(TokenStream::Handle(tokens.raw())), |
8499 data_(ExternalTypedData::Handle(tokens.GetStream())), | 8558 data_(ExternalTypedData::Handle(tokens.GetStream())), |
8500 stream_(reinterpret_cast<uint8_t*>(data_.DataAddr(0)), data_.Length()), | 8559 stream_(reinterpret_cast<uint8_t*>(data_.DataAddr(0)), data_.Length()), |
8501 token_objects_(Array::Handle(tokens.TokenObjects())), | 8560 token_objects_(GrowableObjectArray::Handle(tokens.TokenObjects())), |
8502 obj_(Object::Handle()), | 8561 obj_(Object::Handle()), |
8503 cur_token_pos_(token_pos), | 8562 cur_token_pos_(token_pos), |
8504 cur_token_kind_(Token::kILLEGAL), | 8563 cur_token_kind_(Token::kILLEGAL), |
8505 cur_token_obj_index_(-1), | 8564 cur_token_obj_index_(-1), |
8506 stream_type_(stream_type) { | 8565 stream_type_(stream_type) { |
8507 SetCurrentPosition(token_pos); | 8566 SetCurrentPosition(token_pos); |
8508 } | 8567 } |
8509 | 8568 |
8510 | 8569 |
8511 void TokenStream::Iterator::SetStream(const TokenStream& tokens, | 8570 void TokenStream::Iterator::SetStream(const TokenStream& tokens, |
(...skipping 250 matching lines...)
8762 void Script::set_kind(RawScript::Kind value) const { | 8821 void Script::set_kind(RawScript::Kind value) const { |
8763 StoreNonPointer(&raw_ptr()->kind_, value); | 8822 StoreNonPointer(&raw_ptr()->kind_, value); |
8764 } | 8823 } |
8765 | 8824 |
8766 | 8825 |
8767 void Script::set_tokens(const TokenStream& value) const { | 8826 void Script::set_tokens(const TokenStream& value) const { |
8768 StorePointer(&raw_ptr()->tokens_, value.raw()); | 8827 StorePointer(&raw_ptr()->tokens_, value.raw()); |
8769 } | 8828 } |
8770 | 8829 |
8771 | 8830 |
8772 void Script::Tokenize(const String& private_key) const { | 8831 void Script::Tokenize(const String& private_key, |
8832 bool use_shared_tokens) const { | |
8773 Thread* thread = Thread::Current(); | 8833 Thread* thread = Thread::Current(); |
8774 Zone* zone = thread->zone(); | 8834 Zone* zone = thread->zone(); |
8775 const TokenStream& tkns = TokenStream::Handle(zone, tokens()); | 8835 const TokenStream& tkns = TokenStream::Handle(zone, tokens()); |
8776 if (!tkns.IsNull()) { | 8836 if (!tkns.IsNull()) { |
8777 // Already tokenized. | 8837 // Already tokenized. |
8778 return; | 8838 return; |
8779 } | 8839 } |
8780 // Get the source, scan and allocate the token stream. | 8840 // Get the source, scan and allocate the token stream. |
8781 VMTagScope tagScope(thread, VMTag::kCompileScannerTagId); | 8841 VMTagScope tagScope(thread, VMTag::kCompileScannerTagId); |
8782 CSTAT_TIMER_SCOPE(thread, scanner_timer); | 8842 CSTAT_TIMER_SCOPE(thread, scanner_timer); |
8783 const String& src = String::Handle(zone, Source()); | 8843 const String& src = String::Handle(zone, Source()); |
8784 Scanner scanner(src, private_key); | 8844 Scanner scanner(src, private_key); |
8785 const Scanner::GrowableTokenStream& ts = scanner.GetStream(); | 8845 const Scanner::GrowableTokenStream& ts = scanner.GetStream(); |
8786 INC_STAT(thread, num_tokens_scanned, ts.length()); | 8846 INC_STAT(thread, num_tokens_scanned, ts.length()); |
8787 set_tokens(TokenStream::Handle(zone, TokenStream::New(ts, private_key))); | 8847 set_tokens(TokenStream::Handle(zone, |
8848 TokenStream::New(ts, private_key, use_shared_tokens))); | |
8788 INC_STAT(thread, src_length, src.Length()); | 8849 INC_STAT(thread, src_length, src.Length()); |
8789 } | 8850 } |
8790 | 8851 |
8791 | 8852 |
8792 void Script::SetLocationOffset(intptr_t line_offset, | 8853 void Script::SetLocationOffset(intptr_t line_offset, |
8793 intptr_t col_offset) const { | 8854 intptr_t col_offset) const { |
8794 ASSERT(line_offset >= 0); | 8855 ASSERT(line_offset >= 0); |
8795 ASSERT(col_offset >= 0); | 8856 ASSERT(col_offset >= 0); |
8796 StoreNonPointer(&raw_ptr()->line_offset_, line_offset); | 8857 StoreNonPointer(&raw_ptr()->line_offset_, line_offset); |
8797 StoreNonPointer(&raw_ptr()->col_offset_, col_offset); | 8858 StoreNonPointer(&raw_ptr()->col_offset_, col_offset); |
(...skipping 12984 matching lines...)
21782 return tag_label.ToCString(); | 21843 return tag_label.ToCString(); |
21783 } | 21844 } |
21784 | 21845 |
21785 | 21846 |
21786 void UserTag::PrintJSONImpl(JSONStream* stream, bool ref) const { | 21847 void UserTag::PrintJSONImpl(JSONStream* stream, bool ref) const { |
21787 Instance::PrintJSONImpl(stream, ref); | 21848 Instance::PrintJSONImpl(stream, ref); |
21788 } | 21849 } |
21789 | 21850 |
21790 | 21851 |
21791 } // namespace dart | 21852 } // namespace dart |