Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(288)

Side by Side Diff: runtime/vm/object.cc

Issue 1969563002: Eliminate GrowableTokenStream (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 4 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/object.h" 5 #include "vm/object.h"
6 6
7 #include "include/dart_api.h" 7 #include "include/dart_api.h"
8 #include "platform/assert.h" 8 #include "platform/assert.h"
9 #include "vm/assembler.h" 9 #include "vm/assembler.h"
10 #include "vm/cpu.h" 10 #include "vm/cpu.h"
(...skipping 8439 matching lines...) Expand 10 before | Expand all | Expand 10 after
8450 return String::HashRawSymbol(LiteralToken::Cast(key).literal()); 8450 return String::HashRawSymbol(LiteralToken::Cast(key).literal());
8451 } else { 8451 } else {
8452 return String::Cast(key).Hash(); 8452 return String::Cast(key).Hash();
8453 } 8453 }
8454 } 8454 }
8455 }; 8455 };
8456 typedef UnorderedHashMap<CompressedTokenTraits> CompressedTokenMap; 8456 typedef UnorderedHashMap<CompressedTokenTraits> CompressedTokenMap;
8457 8457
8458 8458
8459 // Helper class for creation of compressed token stream data. 8459 // Helper class for creation of compressed token stream data.
8460 class CompressedTokenStreamData : public ValueObject { 8460 class CompressedTokenStreamData : public Scanner::TokenCollector {
8461 public: 8461 public:
8462 static const intptr_t kInitialBufferSize = 16 * KB; 8462 static const intptr_t kInitialBufferSize = 16 * KB;
8463 static const bool kPrintTokenObjects = false; 8463 static const bool kPrintTokenObjects = false;
8464 8464
8465 CompressedTokenStreamData(const GrowableObjectArray& ta, 8465 CompressedTokenStreamData(const GrowableObjectArray& ta,
8466 CompressedTokenMap* map) : 8466 CompressedTokenMap* map) :
8467 buffer_(NULL), 8467 buffer_(NULL),
8468 stream_(&buffer_, Reallocate, kInitialBufferSize), 8468 stream_(&buffer_, Reallocate, kInitialBufferSize),
8469 token_objects_(ta), 8469 token_objects_(ta),
8470 tokens_(map), 8470 tokens_(map),
8471 value_(Object::Handle()), 8471 value_(Object::Handle()),
8472 fresh_index_smi_(Smi::Handle()) { 8472 fresh_index_smi_(Smi::Handle()),
8473 num_tokens_collected_(0) {
8474 }
8475 virtual ~CompressedTokenStreamData() { }
8476
8477 virtual void AddToken(const Scanner::TokenDescriptor& token) {
8478 if (token.kind == Token::kIDENT) { // Identifier token.
8479 this->AddIdentToken(*token.literal);
siva 2016/05/11 20:21:27 isn't AddIdentToken(...) sufficient? Why this->AddIdentToken(...)? The explicit this-> prefix is unnecessary for a member-function call here.
8480 } else if (Token::NeedsLiteralToken(token.kind)) { // Literal token.
8481 this->AddLiteralToken(token);
8482 } else { // Keyword, pseudo keyword etc.
8483 ASSERT(token.kind < Token::kNumTokens);
8484 this->AddSimpleToken(token.kind);
8485 }
8486 num_tokens_collected_++;
8473 } 8487 }
8474 8488
8489 // Return the compressed token stream.
8490 uint8_t* GetStream() const { return buffer_; }
8491
8492 // Return the compressed token stream length.
8493 intptr_t Length() const { return stream_.bytes_written(); }
8494
8495 intptr_t NumTokens() const { return num_tokens_collected_; }
8496
8497 private:
8475 // Add an IDENT token into the stream and the token hash map. 8498 // Add an IDENT token into the stream and the token hash map.
8476 void AddIdentToken(const String& ident) { 8499 void AddIdentToken(const String& ident) {
8477 ASSERT(ident.IsSymbol()); 8500 ASSERT(ident.IsSymbol());
8478 const intptr_t fresh_index = token_objects_.Length(); 8501 const intptr_t fresh_index = token_objects_.Length();
8479 fresh_index_smi_ = Smi::New(fresh_index); 8502 fresh_index_smi_ = Smi::New(fresh_index);
8480 intptr_t index = Smi::Value(Smi::RawCast( 8503 intptr_t index = Smi::Value(Smi::RawCast(
8481 tokens_->InsertOrGetValue(ident, fresh_index_smi_))); 8504 tokens_->InsertOrGetValue(ident, fresh_index_smi_)));
8482 if (index == fresh_index) { 8505 if (index == fresh_index) {
8483 token_objects_.Add(ident); 8506 token_objects_.Add(ident);
8484 if (kPrintTokenObjects) { 8507 if (kPrintTokenObjects) {
(...skipping 30 matching lines...) Expand all
8515 } 8538 }
8516 } 8539 }
8517 WriteIndex(index); 8540 WriteIndex(index);
8518 } 8541 }
8519 8542
8520 // Add a simple token into the stream. 8543 // Add a simple token into the stream.
8521 void AddSimpleToken(intptr_t kind) { 8544 void AddSimpleToken(intptr_t kind) {
8522 stream_.WriteUnsigned(kind); 8545 stream_.WriteUnsigned(kind);
8523 } 8546 }
8524 8547
8525 // Return the compressed token stream.
8526 uint8_t* GetStream() const { return buffer_; }
8527
8528 // Return the compressed token stream length.
8529 intptr_t Length() const { return stream_.bytes_written(); }
8530
8531 private:
8532 void WriteIndex(intptr_t value) { 8548 void WriteIndex(intptr_t value) {
8533 stream_.WriteUnsigned(value + Token::kNumTokens); 8549 stream_.WriteUnsigned(value + Token::kNumTokens);
8534 } 8550 }
8535 8551
8536 static uint8_t* Reallocate(uint8_t* ptr, 8552 static uint8_t* Reallocate(uint8_t* ptr,
8537 intptr_t old_size, 8553 intptr_t old_size,
8538 intptr_t new_size) { 8554 intptr_t new_size) {
8539 void* new_ptr = ::realloc(reinterpret_cast<void*>(ptr), new_size); 8555 void* new_ptr = ::realloc(reinterpret_cast<void*>(ptr), new_size);
8540 return reinterpret_cast<uint8_t*>(new_ptr); 8556 return reinterpret_cast<uint8_t*>(new_ptr);
8541 } 8557 }
8542 8558
8543 uint8_t* buffer_; 8559 uint8_t* buffer_;
8544 WriteStream stream_; 8560 WriteStream stream_;
8545 const GrowableObjectArray& token_objects_; 8561 const GrowableObjectArray& token_objects_;
8546 CompressedTokenMap* tokens_; 8562 CompressedTokenMap* tokens_;
8547 Object& value_; 8563 Object& value_;
8548 Smi& fresh_index_smi_; 8564 Smi& fresh_index_smi_;
8565 intptr_t num_tokens_collected_;
8549 8566
8550 DISALLOW_COPY_AND_ASSIGN(CompressedTokenStreamData); 8567 DISALLOW_COPY_AND_ASSIGN(CompressedTokenStreamData);
8551 }; 8568 };
8552 8569
8553 8570
8554 RawTokenStream* TokenStream::New(const Scanner::GrowableTokenStream& tokens, 8571 RawTokenStream* TokenStream::New(const String& source,
8555 const String& private_key, 8572 const String& private_key,
8556 bool use_shared_tokens) { 8573 bool use_shared_tokens) {
8557 Thread* thread = Thread::Current(); 8574 Thread* thread = Thread::Current();
8558 Zone* zone = thread->zone(); 8575 Zone* zone = thread->zone();
8559 // Copy the relevant data out of the scanner into a compressed stream of
8560 // tokens.
8561 8576
8562 GrowableObjectArray& token_objects = GrowableObjectArray::Handle(zone); 8577 GrowableObjectArray& token_objects = GrowableObjectArray::Handle(zone);
8563 Array& token_objects_map = Array::Handle(zone); 8578 Array& token_objects_map = Array::Handle(zone);
8564 if (use_shared_tokens) { 8579 if (use_shared_tokens) {
8565 // Use the shared token objects array in the object store. Allocate 8580 // Use the shared token objects array in the object store. Allocate
8566 // a new array if necessary. 8581 // a new array if necessary.
8567 ObjectStore* store = thread->isolate()->object_store(); 8582 ObjectStore* store = thread->isolate()->object_store();
8568 if (store->token_objects() == GrowableObjectArray::null()) { 8583 if (store->token_objects() == GrowableObjectArray::null()) {
8569 OpenSharedTokenList(thread->isolate()); 8584 OpenSharedTokenList(thread->isolate());
8570 } 8585 }
8571 token_objects = store->token_objects(); 8586 token_objects = store->token_objects();
8572 token_objects_map = store->token_objects_map(); 8587 token_objects_map = store->token_objects_map();
8573 } else { 8588 } else {
8574 // Use new, non-shared token array. 8589 // Use new, non-shared token array.
8575 const int kInitialPrivateCapacity = 256; 8590 const int kInitialPrivateCapacity = 256;
8576 token_objects = 8591 token_objects =
8577 GrowableObjectArray::New(kInitialPrivateCapacity, Heap::kOld); 8592 GrowableObjectArray::New(kInitialPrivateCapacity, Heap::kOld);
8578 token_objects_map = 8593 token_objects_map =
8579 HashTables::New<CompressedTokenMap>(kInitialPrivateCapacity, 8594 HashTables::New<CompressedTokenMap>(kInitialPrivateCapacity,
8580 Heap::kOld); 8595 Heap::kOld);
8581 } 8596 }
8582 CompressedTokenMap map(token_objects_map.raw()); 8597 CompressedTokenMap map(token_objects_map.raw());
8583 CompressedTokenStreamData data(token_objects, &map); 8598 CompressedTokenStreamData data(token_objects, &map);
8584 8599 Scanner scanner(source, private_key);
8585 intptr_t len = tokens.length(); 8600 scanner.ScanAll(&data);
8586 for (intptr_t i = 0; i < len; i++) { 8601 INC_STAT(thread, num_tokens_scanned, data.NumTokens());
8587 Scanner::TokenDescriptor token = tokens[i];
8588 if (token.kind == Token::kIDENT) { // Identifier token.
8589 data.AddIdentToken(*token.literal);
8590 } else if (Token::NeedsLiteralToken(token.kind)) { // Literal token.
8591 data.AddLiteralToken(token);
8592 } else { // Keyword, pseudo keyword etc.
8593 ASSERT(token.kind < Token::kNumTokens);
8594 data.AddSimpleToken(token.kind);
8595 }
8596 }
8597 data.AddSimpleToken(Token::kEOS); // End of stream.
8598 8602
8599 // Create and setup the token stream object. 8603 // Create and setup the token stream object.
8600 const ExternalTypedData& stream = ExternalTypedData::Handle( 8604 const ExternalTypedData& stream = ExternalTypedData::Handle(
8601 zone, 8605 zone,
8602 ExternalTypedData::New(kExternalTypedDataUint8ArrayCid, 8606 ExternalTypedData::New(kExternalTypedDataUint8ArrayCid,
8603 data.GetStream(), data.Length(), Heap::kOld)); 8607 data.GetStream(), data.Length(), Heap::kOld));
8604 stream.AddFinalizer(data.GetStream(), DataFinalizer); 8608 stream.AddFinalizer(data.GetStream(), DataFinalizer);
8605 const TokenStream& result = TokenStream::Handle(zone, New()); 8609 const TokenStream& result = TokenStream::Handle(zone, New());
8606 result.SetPrivateKey(private_key); 8610 result.SetPrivateKey(private_key);
8607 { 8611 {
(...skipping 327 matching lines...) Expand 10 before | Expand all | Expand 10 after
8935 Zone* zone = thread->zone(); 8939 Zone* zone = thread->zone();
8936 const TokenStream& tkns = TokenStream::Handle(zone, tokens()); 8940 const TokenStream& tkns = TokenStream::Handle(zone, tokens());
8937 if (!tkns.IsNull()) { 8941 if (!tkns.IsNull()) {
8938 // Already tokenized. 8942 // Already tokenized.
8939 return; 8943 return;
8940 } 8944 }
8941 // Get the source, scan and allocate the token stream. 8945 // Get the source, scan and allocate the token stream.
8942 VMTagScope tagScope(thread, VMTag::kCompileScannerTagId); 8946 VMTagScope tagScope(thread, VMTag::kCompileScannerTagId);
8943 CSTAT_TIMER_SCOPE(thread, scanner_timer); 8947 CSTAT_TIMER_SCOPE(thread, scanner_timer);
8944 const String& src = String::Handle(zone, Source()); 8948 const String& src = String::Handle(zone, Source());
8945 Scanner scanner(src, private_key); 8949 const TokenStream& ts = TokenStream::Handle(zone,
8946 const Scanner::GrowableTokenStream& ts = scanner.GetStream(); 8950 TokenStream::New(src, private_key, use_shared_tokens));
8947 INC_STAT(thread, num_tokens_scanned, ts.length()); 8951 set_tokens(ts);
8948 set_tokens(TokenStream::Handle(zone,
8949 TokenStream::New(ts, private_key, use_shared_tokens)));
8950 INC_STAT(thread, src_length, src.Length()); 8952 INC_STAT(thread, src_length, src.Length());
8951 } 8953 }
8952 8954
8953 8955
8954 void Script::SetLocationOffset(intptr_t line_offset, 8956 void Script::SetLocationOffset(intptr_t line_offset,
8955 intptr_t col_offset) const { 8957 intptr_t col_offset) const {
8956 ASSERT(line_offset >= 0); 8958 ASSERT(line_offset >= 0);
8957 ASSERT(col_offset >= 0); 8959 ASSERT(col_offset >= 0);
8958 StoreNonPointer(&raw_ptr()->line_offset_, line_offset); 8960 StoreNonPointer(&raw_ptr()->line_offset_, line_offset);
8959 StoreNonPointer(&raw_ptr()->col_offset_, col_offset); 8961 StoreNonPointer(&raw_ptr()->col_offset_, col_offset);
(...skipping 13492 matching lines...) Expand 10 before | Expand all | Expand 10 after
22452 return UserTag::null(); 22454 return UserTag::null();
22453 } 22455 }
22454 22456
22455 22457
22456 const char* UserTag::ToCString() const { 22458 const char* UserTag::ToCString() const {
22457 const String& tag_label = String::Handle(label()); 22459 const String& tag_label = String::Handle(label());
22458 return tag_label.ToCString(); 22460 return tag_label.ToCString();
22459 } 22461 }
22460 22462
22461 } // namespace dart 22463 } // namespace dart
OLDNEW
« no previous file with comments | « runtime/vm/object.h ('k') | runtime/vm/object_test.cc » ('j') | runtime/vm/scanner_test.cc » ('J')

Powered by Google App Engine
This is Rietveld 408576698