1 # Copyright 2013 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
"""An implementation of the server side of the Chromium sync protocol.

The details of the protocol are described mostly by comments in the protocol
buffer definition at chrome/browser/sync/protocol/sync.proto.
"""
import copy
import operator
import pickle
import random
import string
import sys

import google.protobuf.text_format

import app_list_specifics_pb2
import app_notification_specifics_pb2
import app_setting_specifics_pb2
import app_specifics_pb2
import article_specifics_pb2
import autofill_specifics_pb2
import bookmark_specifics_pb2
import client_commands_pb2
import dictionary_specifics_pb2
import extension_setting_specifics_pb2
import extension_specifics_pb2
import favicon_image_specifics_pb2
import favicon_tracking_specifics_pb2
import get_updates_caller_info_pb2
import history_delete_directive_specifics_pb2
import managed_user_setting_specifics_pb2
import managed_user_shared_setting_specifics_pb2
import managed_user_specifics_pb2
import nigori_specifics_pb2
import password_specifics_pb2
import preference_specifics_pb2
import priority_preference_specifics_pb2
import search_engine_specifics_pb2
import session_specifics_pb2
import sync_pb2
import synced_notification_data_pb2
import synced_notification_render_pb2
import synced_notification_specifics_pb2
import theme_specifics_pb2
import typed_url_specifics_pb2
# An enumeration of the various kinds of data that can be synced.
# Over the wire, this enumeration is not used: a sync object's type is
# inferred by which EntitySpecifics field it has.  But in the context
# of a program, it is useful to have an enumeration.
ALL_TYPES = (
    TOP_LEVEL,  # The type of the 'Google Chrome' folder.
    APPS,
    APP_LIST,
    APP_NOTIFICATION,
    APP_SETTINGS,
    ARTICLE,
    AUTOFILL,
    AUTOFILL_PROFILE,
    BOOKMARK,
    DEVICE_INFO,
    DICTIONARY,
    EXPERIMENTS,
    EXTENSIONS,
    HISTORY_DELETE_DIRECTIVE,
    MANAGED_USER_SETTING,
    MANAGED_USER_SHARED_SETTING,
    MANAGED_USER,
    NIGORI,
    PASSWORD,
    PREFERENCE,
    PRIORITY_PREFERENCE,
    SEARCH_ENGINE,
    SESSION,
    SYNCED_NOTIFICATION,
    THEME,
    TYPED_URL,
    EXTENSION_SETTINGS,
    FAVICON_IMAGES,
    FAVICON_TRACKING) = range(29)
# An enumeration on the frequency at which the server should send errors
# to the client.  This would be specified by the url that triggers the error.
# Note: This enum should be kept in the same order as the enum in sync_test.h.
SYNC_ERROR_FREQUENCY = (
    ERROR_FREQUENCY_NONE,
    ERROR_FREQUENCY_ALWAYS,
    ERROR_FREQUENCY_TWO_THIRDS) = range(3)

# Well-known server tag of the top level 'Google Chrome' folder.
TOP_LEVEL_FOLDER_TAG = 'google_chrome'
# Given a sync type from ALL_TYPES, find the FieldDescriptor corresponding
# to that datatype.  Note that TOP_LEVEL has no such token.
SYNC_TYPE_FIELDS = sync_pb2.EntitySpecifics.DESCRIPTOR.fields_by_name
SYNC_TYPE_TO_DESCRIPTOR = {
    APP_LIST: SYNC_TYPE_FIELDS['app_list'],
    APP_NOTIFICATION: SYNC_TYPE_FIELDS['app_notification'],
    APP_SETTINGS: SYNC_TYPE_FIELDS['app_setting'],
    APPS: SYNC_TYPE_FIELDS['app'],
    ARTICLE: SYNC_TYPE_FIELDS['article'],
    AUTOFILL: SYNC_TYPE_FIELDS['autofill'],
    AUTOFILL_PROFILE: SYNC_TYPE_FIELDS['autofill_profile'],
    BOOKMARK: SYNC_TYPE_FIELDS['bookmark'],
    DEVICE_INFO: SYNC_TYPE_FIELDS['device_info'],
    DICTIONARY: SYNC_TYPE_FIELDS['dictionary'],
    EXPERIMENTS: SYNC_TYPE_FIELDS['experiments'],
    EXTENSION_SETTINGS: SYNC_TYPE_FIELDS['extension_setting'],
    EXTENSIONS: SYNC_TYPE_FIELDS['extension'],
    FAVICON_IMAGES: SYNC_TYPE_FIELDS['favicon_image'],
    FAVICON_TRACKING: SYNC_TYPE_FIELDS['favicon_tracking'],
    HISTORY_DELETE_DIRECTIVE: SYNC_TYPE_FIELDS['history_delete_directive'],
    MANAGED_USER_SHARED_SETTING:
        SYNC_TYPE_FIELDS['managed_user_shared_setting'],
    MANAGED_USER_SETTING: SYNC_TYPE_FIELDS['managed_user_setting'],
    MANAGED_USER: SYNC_TYPE_FIELDS['managed_user'],
    NIGORI: SYNC_TYPE_FIELDS['nigori'],
    PASSWORD: SYNC_TYPE_FIELDS['password'],
    PREFERENCE: SYNC_TYPE_FIELDS['preference'],
    PRIORITY_PREFERENCE: SYNC_TYPE_FIELDS['priority_preference'],
    SEARCH_ENGINE: SYNC_TYPE_FIELDS['search_engine'],
    SESSION: SYNC_TYPE_FIELDS['session'],
    # Quote style normalized to match the rest of the file.
    SYNCED_NOTIFICATION: SYNC_TYPE_FIELDS['synced_notification'],
    THEME: SYNC_TYPE_FIELDS['theme'],
    TYPED_URL: SYNC_TYPE_FIELDS['typed_url'],
}
# The parent ID used to indicate a top-level node.
ROOT_ID = '0'
# Unix epoch plus one day, expressed as a struct_time tuple:
# UTC Thursday Jan 2 1970, 00:00:00, not DST.
# One day is added because, in timezones with a positive UTC offset,
# time.mktime raises OverflowError for instants before the local epoch
# rather than returning a negative number.
FIRST_DAY_UNIX_TIME_EPOCH = (1970, 1, 2, 0, 0, 0, 4, 2, 0)
ONE_DAY_SECONDS = 24 * 60 * 60
# Length, in characters, of each server-generated keystore encryption key.
KEYSTORE_KEY_LENGTH = 16

# Hashed client tags identifying the experiment nodes.
KEYSTORE_ENCRYPTION_EXPERIMENT_TAG = "pis8ZRzh98/MKLtVEio2mr42LQA="
PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG = "Z1xgeh3QUBa50vdEPd8C/4c7jfE="
class Error(Exception):
  """Base class for all errors raised by this module."""
class ProtobufDataTypeFieldNotUnique(Error):
  """Raised when an entry carries more than one data type field."""
class DataTypeIdNotRecognized(Error):
  """Raised when a requested data type is not recognized."""
class MigrationDoneError(Error):
  """A server-side migration occurred; clients must re-sync some datatypes.

  Attributes:
    datatypes: a list of the datatypes (python enum) needing migration.
  """

  def __init__(self, datatypes):
    self.datatypes = datatypes
class StoreBirthdayError(Error):
  """Raised when a client-supplied birthday does not match this server's."""
class TransientError(Error):
  """Raised to make the server answer the client with a transient error."""
class SyncInducedError(Error):
  """Raised to make the server answer the client with an induced error."""
class InducedErrorFrequencyNotDefined(Error):
  """Raised when the configured error frequency is not handled."""
class ClientNotConnectedError(Error):
  """Raised when the client is not connected to the server."""
def GetEntryType(entry):
  """Extract the sync type from a SyncEntry.

  Args:
    entry: A SyncEntity protobuf object whose type to determine.
  Returns:
    An enum value from ALL_TYPES if the entry's type can be determined, or None
    if the type cannot be determined.
  Raises:
    ProtobufDataTypeFieldNotUnique: More than one type was indicated by
      the entry.
  """
  if entry.server_defined_unique_tag == TOP_LEVEL_FOLDER_TAG:
    return TOP_LEVEL
  entry_types = GetEntryTypesFromSpecifics(entry.specifics)
  if not entry_types:
    return None

  # If there is more than one, either there's a bug, or else the caller
  # should use GetEntryTypes.
  if len(entry_types) > 1:
    raise ProtobufDataTypeFieldNotUnique
  return entry_types[0]
def GetEntryTypesFromSpecifics(specifics):
  """Determine the sync types indicated by an EntitySpecifics's field(s).

  If the specifics have more than one recognized data type field (as commonly
  happens with the requested_types field of GetUpdatesMessage), all types
  will be returned.  Callers must handle the possibility of the returned
  value having more than one item.

  Args:
    specifics: A EntitySpecifics protobuf message whose fields to enumerate.
  Returns:
    A list of the sync types (values from ALL_TYPES) associated with each
    recognized extension of the specifics message.
  """
  return [data_type for data_type, field_descriptor
          in SYNC_TYPE_TO_DESCRIPTOR.iteritems()
          if specifics.HasField(field_descriptor.name)]
def SyncTypeToProtocolDataTypeId(data_type):
  """Convert from a sync type (python enum) to the protocol's data type id."""
  descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type]
  return descriptor.number
def ProtocolDataTypeIdToSyncType(protocol_data_type_id):
  """Convert from the protocol's data type id to a sync type (python enum).

  Raises:
    DataTypeIdNotRecognized: if the id does not map to any known sync type.
  """
  for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if field_descriptor.number == protocol_data_type_id:
      return data_type
  raise DataTypeIdNotRecognized
def DataTypeStringToSyncTypeLoose(data_type_string):
  """Converts a human-readable string to a sync type (python enum).

  Capitalization and pluralization don't matter; this function is appropriate
  for values that might have been typed by a human being; e.g., command-line
  flags or query parameters.

  Raises:
    DataTypeIdNotRecognized: if no sync type matches the string.
  """
  if data_type_string.isdigit():
    return ProtocolDataTypeIdToSyncType(int(data_type_string))
  name = data_type_string.lower().rstrip('s')
  for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if field_descriptor.name.lower().rstrip('s') == name:
      return data_type
  raise DataTypeIdNotRecognized
def MakeNewKeystoreKey():
  """Returns a new random keystore key."""
  alphabet = string.ascii_uppercase + string.digits
  return ''.join(random.choice(alphabet)
                 for _ in xrange(KEYSTORE_KEY_LENGTH))
def SyncTypeToString(data_type):
  """Formats a sync type enum (from ALL_TYPES) to a human-readable string."""
  descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type]
  return descriptor.name
def CallerInfoToString(caller_info_source):
  """Formats a GetUpdatesSource enum value to a readable string."""
  # Parenthesized continuation instead of backslashes, per PEP 8.
  enum_type = (get_updates_caller_info_pb2.GetUpdatesCallerInfo
               .DESCRIPTOR.enum_types_by_name['GetUpdatesSource'])
  return enum_type.values_by_number[caller_info_source].name
def ShortDatatypeListSummary(data_types):
  """Formats compactly a list of sync types (python enums) for human eyes.

  This function is intended for use by logging.  If the list of datatypes
  contains almost all of the values, the return value will be expressed
  in terms of the datatypes that aren't set.
  """
  included = set(data_types) - set([TOP_LEVEL])
  if not included:
    return 'nothing'
  excluded = set(ALL_TYPES) - included - set([TOP_LEVEL])
  if not excluded:
    return 'everything'
  simple_text = '+'.join(sorted([SyncTypeToString(x) for x in included]))
  all_but_text = 'all except %s' % (
      '+'.join(sorted([SyncTypeToString(x) for x in excluded])))
  # Prefer whichever phrasing is shorter (or lists fewer types).
  if len(included) < len(excluded) or len(simple_text) <= len(all_but_text):
    return simple_text
  else:
    return all_but_text
def GetDefaultEntitySpecifics(data_type):
  """Get an EntitySpecifics having a sync type's default field value."""
  specifics = sync_pb2.EntitySpecifics()
  if data_type in SYNC_TYPE_TO_DESCRIPTOR:
    descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type]
    # Mark the field present (with default contents) so the specifics
    # identify the datatype on the wire.
    getattr(specifics, descriptor.name).SetInParent()
  return specifics
class PermanentItem(object):
  """A specification of one server-created permanent item.

  Attributes:
    tag: A known-to-the-client value that uniquely identifies a server-created
      permanent item.
    name: The human-readable display name for this item.
    parent_tag: The tag of the permanent item's parent.  If ROOT_ID, indicates
      a top-level item.  Otherwise, this must be the tag value of some other
      server-created permanent item.
    sync_type: A value from ALL_TYPES, giving the datatype of this permanent
      item.  This controls which types of client GetUpdates requests will
      cause the permanent item to be created and returned.
    create_by_default: Whether the permanent item is created at startup or not.
      This value is set to True in the default case. Non-default permanent items
      are those that are created only when a client explicitly tells the server
      to do so.
  """

  def __init__(self, tag, name, parent_tag, sync_type, create_by_default=True):
    self.tag = tag
    self.name = name
    self.parent_tag = parent_tag
    self.sync_type = sync_type
    self.create_by_default = create_by_default
class MigrationHistory(object):
  """A record of the migration events associated with an account.

  Each migration event invalidates one or more datatypes on all clients
  that had synced the datatype before the event.  Such clients will continue
  to receive MigrationDone errors until they throw away their progress and
  re-sync that datatype from the beginning.
  """

  def __init__(self):
    # Per-datatype list of migration versions, oldest first; version 1 is
    # the initial (pre-migration) state.
    self._migrations = {}
    for datatype in ALL_TYPES:
      self._migrations[datatype] = [1]
    self._next_migration_version = 2

  def GetLatestVersion(self, datatype):
    return self._migrations[datatype][-1]

  def CheckAllCurrent(self, versions_map):
    """Raises an error if any of the provided versions are out of date.

    This function intentionally returns migrations in the order that they were
    triggered.  Doing it this way allows the client to queue up two migrations
    in a row, so the second one is received while responding to the first.

    Args:
      versions_map: a map whose keys are datatypes and whose values are
        versions.

    Raises:
      MigrationDoneError: if a mismatch is found.
    """
    problems = {}
    for datatype, client_migration in versions_map.iteritems():
      for server_migration in self._migrations[datatype]:
        if client_migration < server_migration:
          problems.setdefault(server_migration, []).append(datatype)
    if problems:
      # Report only the oldest outstanding migration, so clients replay
      # migrations one at a time, in order.
      raise MigrationDoneError(problems[min(problems.keys())])

  def Bump(self, datatypes):
    """Add a record of a migration, to cause errors on future requests."""
    for datatype in datatypes:
      self._migrations[datatype].append(self._next_migration_version)
    self._next_migration_version += 1
class UpdateSieve(object):
  """A filter to remove items the client has already seen."""

  def __init__(self, request, migration_history=None):
    self._original_request = request
    # Maps datatype -> the timestamp from which the client wants updates.
    self._state = {}
    self._migration_history = migration_history or MigrationHistory()
    self._migration_versions_to_check = {}
    if request.from_progress_marker:
      for marker in request.from_progress_marker:
        data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
        if marker.HasField('timestamp_token_for_migration'):
          timestamp = marker.timestamp_token_for_migration
          if timestamp:
            # Only migration-check datatypes that have sync progress.
            self._migration_versions_to_check[data_type] = 1
        elif marker.token:
          (timestamp, version) = pickle.loads(marker.token)
          self._migration_versions_to_check[data_type] = version
        elif marker.HasField('token'):
          # An empty-but-present token means "from the beginning of time".
          timestamp = 0
        else:
          raise ValueError('No timestamp information in progress marker.')
        data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
        self._state[data_type] = timestamp
    elif request.HasField('from_timestamp'):
      for data_type in GetEntryTypesFromSpecifics(request.requested_types):
        self._state[data_type] = request.from_timestamp
        self._migration_versions_to_check[data_type] = 1
    if self._state:
      # The TOP_LEVEL folder is implicitly requested at the oldest timestamp.
      self._state[TOP_LEVEL] = min(self._state.itervalues())

  def SummarizeRequest(self):
    timestamps = {}
    for data_type, timestamp in self._state.iteritems():
      if data_type == TOP_LEVEL:
        continue
      timestamps.setdefault(timestamp, []).append(data_type)
    return ', '.join('<%s>@%d' % (ShortDatatypeListSummary(types), stamp)
                     for stamp, types in sorted(timestamps.iteritems()))

  def CheckMigrationState(self):
    self._migration_history.CheckAllCurrent(self._migration_versions_to_check)

  def ClientWantsItem(self, item):
    """Return true if the client hasn't already seen an item."""
    return self._state.get(GetEntryType(item), sys.maxint) < item.version

  def HasAnyTimestamp(self):
    """Return true if at least one datatype was requested."""
    return bool(self._state)

  def GetMinTimestamp(self):
    """Return the smallest timestamp requested across all datatypes."""
    return min(self._state.itervalues())

  def GetFirstTimeTypes(self):
    """Return a list of datatypes requesting updates from timestamp zero."""
    return [datatype for datatype, timestamp in self._state.iteritems()
            if timestamp == 0]

  def GetCreateMobileBookmarks(self):
    """Return true if the client has requested to create the 'Mobile
    Bookmarks' folder.
    """
    return (self._original_request.HasField('create_mobile_bookmarks_folder')
            and self._original_request.create_mobile_bookmarks_folder)

  def SaveProgress(self, new_timestamp, get_updates_response):
    """Write the new_timestamp or new_progress_marker fields to a response."""
    if self._original_request.from_progress_marker:
      for data_type, old_timestamp in self._state.iteritems():
        if data_type == TOP_LEVEL:
          continue
        new_marker = sync_pb2.DataTypeProgressMarker()
        new_marker.data_type_id = SyncTypeToProtocolDataTypeId(data_type)
        final_stamp = max(old_timestamp, new_timestamp)
        final_migration = self._migration_history.GetLatestVersion(data_type)
        new_marker.token = pickle.dumps((final_stamp, final_migration))
        get_updates_response.new_progress_marker.add().MergeFrom(new_marker)
    elif self._original_request.HasField('from_timestamp'):
      if self._original_request.from_timestamp < new_timestamp:
        get_updates_response.new_timestamp = new_timestamp
478 class SyncDataModel(object):
479 """Models the account state of one sync user."""
482 # Specify all the permanent items that a model might need.
483 _PERMANENT_ITEM_SPECS = [
484 PermanentItem('google_chrome_apps', name='Apps',
485 parent_tag=ROOT_ID, sync_type=APPS),
486 PermanentItem('google_chrome_app_list', name='App List',
487 parent_tag=ROOT_ID, sync_type=APP_LIST),
488 PermanentItem('google_chrome_app_notifications', name='App Notifications',
489 parent_tag=ROOT_ID, sync_type=APP_NOTIFICATION),
490 PermanentItem('google_chrome_app_settings',
492 parent_tag=ROOT_ID, sync_type=APP_SETTINGS),
493 PermanentItem('google_chrome_bookmarks', name='Bookmarks',
494 parent_tag=ROOT_ID, sync_type=BOOKMARK),
495 PermanentItem('bookmark_bar', name='Bookmark Bar',
496 parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
497 PermanentItem('other_bookmarks', name='Other Bookmarks',
498 parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
499 PermanentItem('synced_bookmarks', name='Synced Bookmarks',
500 parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK,
501 create_by_default=False),
502 PermanentItem('google_chrome_autofill', name='Autofill',
503 parent_tag=ROOT_ID, sync_type=AUTOFILL),
504 PermanentItem('google_chrome_autofill_profiles', name='Autofill Profiles',
505 parent_tag=ROOT_ID, sync_type=AUTOFILL_PROFILE),
506 PermanentItem('google_chrome_device_info', name='Device Info',
507 parent_tag=ROOT_ID, sync_type=DEVICE_INFO),
508 PermanentItem('google_chrome_experiments', name='Experiments',
509 parent_tag=ROOT_ID, sync_type=EXPERIMENTS),
510 PermanentItem('google_chrome_extension_settings',
511 name='Extension Settings',
512 parent_tag=ROOT_ID, sync_type=EXTENSION_SETTINGS),
513 PermanentItem('google_chrome_extensions', name='Extensions',
514 parent_tag=ROOT_ID, sync_type=EXTENSIONS),
515 PermanentItem('google_chrome_history_delete_directives',
516 name='History Delete Directives',
518 sync_type=HISTORY_DELETE_DIRECTIVE),
519 PermanentItem('google_chrome_favicon_images',
520 name='Favicon Images',
522 sync_type=FAVICON_IMAGES),
523 PermanentItem('google_chrome_favicon_tracking',
524 name='Favicon Tracking',
526 sync_type=FAVICON_TRACKING),
527 PermanentItem('google_chrome_managed_user_settings',
528 name='Managed User Settings',
529 parent_tag=ROOT_ID, sync_type=MANAGED_USER_SETTING),
530 PermanentItem('google_chrome_managed_users',
531 name='Managed Users',
532 parent_tag=ROOT_ID, sync_type=MANAGED_USER),
533 PermanentItem('google_chrome_managed_user_shared_settings',
534 name='Managed User Shared Settings',
535 parent_tag=ROOT_ID, sync_type=MANAGED_USER_SHARED_SETTING),
536 PermanentItem('google_chrome_nigori', name='Nigori',
537 parent_tag=ROOT_ID, sync_type=NIGORI),
538 PermanentItem('google_chrome_passwords', name='Passwords',
539 parent_tag=ROOT_ID, sync_type=PASSWORD),
540 PermanentItem('google_chrome_preferences', name='Preferences',
541 parent_tag=ROOT_ID, sync_type=PREFERENCE),
542 PermanentItem('google_chrome_priority_preferences',
543 name='Priority Preferences',
544 parent_tag=ROOT_ID, sync_type=PRIORITY_PREFERENCE),
545 PermanentItem('google_chrome_synced_notifications',
546 name='Synced Notifications',
547 parent_tag=ROOT_ID, sync_type=SYNCED_NOTIFICATION),
548 PermanentItem('google_chrome_search_engines', name='Search Engines',
549 parent_tag=ROOT_ID, sync_type=SEARCH_ENGINE),
550 PermanentItem('google_chrome_sessions', name='Sessions',
551 parent_tag=ROOT_ID, sync_type=SESSION),
552 PermanentItem('google_chrome_themes', name='Themes',
553 parent_tag=ROOT_ID, sync_type=THEME),
554 PermanentItem('google_chrome_typed_urls', name='Typed URLs',
555 parent_tag=ROOT_ID, sync_type=TYPED_URL),
556 PermanentItem('google_chrome_dictionary', name='Dictionary',
557 parent_tag=ROOT_ID, sync_type=DICTIONARY),
558 PermanentItem('google_chrome_articles', name='Articles',
559 parent_tag=ROOT_ID, sync_type=ARTICLE),
563 # Monotonically increasing version number. The next object change will
564 # take on this value + 1.
567 # The definitive copy of this client's items: a map from ID string to a
568 # SyncEntity protocol buffer.
571 self.ResetStoreBirthday()
572 self.migration_history = MigrationHistory()
573 self.induced_error = sync_pb2.ClientToServerResponse.Error()
574 self.induced_error_frequency = 0
575 self.sync_count_before_errors = 0
576 self.acknowledge_managed_users = False
577 self._keys = [MakeNewKeystoreKey()]
579 def _SaveEntry(self, entry):
580 """Insert or update an entry in the change log, and give it a new version.
582 The ID fields of this entry are assumed to be valid server IDs. This
583 entry will be updated with a new version number and sync_timestamp.
586 entry: The entry to be added or updated.
589 # Maintain a global (rather than per-item) sequence number and use it
590 # both as the per-entry version as well as the update-progress timestamp.
591 # This simulates the behavior of the original server implementation.
592 entry.version = self._version
593 entry.sync_timestamp = self._version
595 # Preserve the originator info, which the client is not required to send
597 base_entry = self._entries.get(entry.id_string)
599 entry.originator_cache_guid = base_entry.originator_cache_guid
600 entry.originator_client_item_id = base_entry.originator_client_item_id
602 self._entries[entry.id_string] = copy.deepcopy(entry)
604 def _ServerTagToId(self, tag):
605 """Determine the server ID from a server-unique tag.
607 The resulting value is guaranteed not to collide with the other ID
611 datatype: The sync type (python enum) of the identified object.
612 tag: The unique, known-to-the-client tag of a server-generated item.
614 The string value of the computed server ID.
616 if not tag or tag == ROOT_ID:
618 spec = [x for x in self._PERMANENT_ITEM_SPECS if x.tag == tag][0]
619 return self._MakeCurrentId(spec.sync_type, '<server tag>%s' % tag)
621 def _ClientTagToId(self, datatype, tag):
622 """Determine the server ID from a client-unique tag.
624 The resulting value is guaranteed not to collide with the other ID
628 datatype: The sync type (python enum) of the identified object.
629 tag: The unique, opaque-to-the-server tag of a client-tagged item.
631 The string value of the computed server ID.
633 return self._MakeCurrentId(datatype, '<client tag>%s' % tag)
635 def _ClientIdToId(self, datatype, client_guid, client_item_id):
636 """Compute a unique server ID from a client-local ID tag.
638 The resulting value is guaranteed not to collide with the other ID
642 datatype: The sync type (python enum) of the identified object.
643 client_guid: A globally unique ID that identifies the client which
645 client_item_id: An ID that uniquely identifies this item on the client
648 The string value of the computed server ID.
650 # Using the client ID info is not required here (we could instead generate
651 # a random ID), but it's useful for debugging.
652 return self._MakeCurrentId(datatype,
653 '<server ID originally>%s/%s' % (client_guid, client_item_id))
655 def _MakeCurrentId(self, datatype, inner_id):
656 return '%d^%d^%s' % (datatype,
657 self.migration_history.GetLatestVersion(datatype),
660 def _ExtractIdInfo(self, id_string):
661 if not id_string or id_string == ROOT_ID:
663 datatype_string, separator, remainder = id_string.partition('^')
664 migration_version_string, separator, inner_id = remainder.partition('^')
665 return (int(datatype_string), int(migration_version_string), inner_id)
667 def _WritePosition(self, entry, parent_id):
668 """Ensure the entry has an absolute, numeric position and parent_id.
670 Historically, clients would specify positions using the predecessor-based
671 references in the insert_after_item_id field; starting July 2011, this
672 was changed and Chrome now sends up the absolute position. The server
673 must store a position_in_parent value and must not maintain
674 insert_after_item_id.
675 Starting in Jan 2013, the client will also send up a unique_position field
676 which should be saved and returned on subsequent GetUpdates.
679 entry: The entry for which to write a position. Its ID field are
680 assumed to be server IDs. This entry will have its parent_id_string,
681 position_in_parent and unique_position fields updated; its
682 insert_after_item_id field will be cleared.
683 parent_id: The ID of the entry intended as the new parent.
686 entry.parent_id_string = parent_id
687 if not entry.HasField('position_in_parent'):
688 entry.position_in_parent = 1337 # A debuggable, distinctive default.
689 entry.ClearField('insert_after_item_id')
691 def _ItemExists(self, id_string):
692 """Determine whether an item exists in the changelog."""
693 return id_string in self._entries
695 def _CreatePermanentItem(self, spec):
696 """Create one permanent item from its spec, if it doesn't exist.
698 The resulting item is added to the changelog.
701 spec: A PermanentItem object holding the properties of the item to create.
703 id_string = self._ServerTagToId(spec.tag)
704 if self._ItemExists(id_string):
706 print 'Creating permanent item: %s' % spec.name
707 entry = sync_pb2.SyncEntity()
708 entry.id_string = id_string
709 entry.non_unique_name = spec.name
710 entry.name = spec.name
711 entry.server_defined_unique_tag = spec.tag
713 entry.deleted = False
714 entry.specifics.CopyFrom(GetDefaultEntitySpecifics(spec.sync_type))
715 self._WritePosition(entry, self._ServerTagToId(spec.parent_tag))
716 self._SaveEntry(entry)
718 def _CreateDefaultPermanentItems(self, requested_types):
719 """Ensure creation of all default permanent items for a given set of types.
722 requested_types: A list of sync data types from ALL_TYPES.
723 All default permanent items of only these types will be created.
725 for spec in self._PERMANENT_ITEM_SPECS:
726 if spec.sync_type in requested_types and spec.create_by_default:
727 self._CreatePermanentItem(spec)
729 def ResetStoreBirthday(self):
730 """Resets the store birthday to a random value."""
731 # TODO(nick): uuid.uuid1() is better, but python 2.5 only.
732 self.store_birthday = '%0.30f' % random.random()
734 def StoreBirthday(self):
735 """Gets the store birthday."""
736 return self.store_birthday
738 def GetChanges(self, sieve):
739 """Get entries which have changed, oldest first.
741 The returned entries are limited to being _BATCH_SIZE many. The entries
742 are returned in strict version order.
745 sieve: An update sieve to use to filter out updates the client
748 A tuple of (version, entries, changes_remaining). Version is a new
749 timestamp value, which should be used as the starting point for the
750 next query. Entries is the batch of entries meeting the current
751 timestamp query. Changes_remaining indicates the number of changes
752 left on the server after this batch.
754 if not sieve.HasAnyTimestamp():
756 min_timestamp = sieve.GetMinTimestamp()
757 first_time_types = sieve.GetFirstTimeTypes()
758 self._CreateDefaultPermanentItems(first_time_types)
759 # Mobile bookmark folder is not created by default, create it only when
760 # client requested it.
761 if (sieve.GetCreateMobileBookmarks() and
762 first_time_types.count(BOOKMARK) > 0):
763 self.TriggerCreateSyncedBookmarks()
765 self.TriggerAcknowledgeManagedUsers()
767 change_log = sorted(self._entries.values(),
768 key=operator.attrgetter('version'))
769 new_changes = [x for x in change_log if x.version > min_timestamp]
770 # Pick batch_size new changes, and then filter them. This matches
771 # the RPC behavior of the production sync server.
772 batch = new_changes[:self._BATCH_SIZE]
774 # Client is up to date.
775 return (min_timestamp, [], 0)
777 # Restrict batch to requested types. Tombstones are untyped
778 # and will always get included.
779 filtered = [copy.deepcopy(item) for item in batch
780 if item.deleted or sieve.ClientWantsItem(item)]
782 # The new client timestamp is the timestamp of the last item in the
783 # batch, even if that item was filtered out.
784 return (batch[-1].version, filtered, len(new_changes) - len(batch))
786 def GetKeystoreKeys(self):
787 """Returns the encryption keys for this account."""
788 print "Returning encryption keys: %s" % self._keys
791 def _CopyOverImmutableFields(self, entry):
792 """Preserve immutable fields by copying pre-commit state.
795 entry: A sync entity from the client.
797 if entry.id_string in self._entries:
798 if self._entries[entry.id_string].HasField(
799 'server_defined_unique_tag'):
800 entry.server_defined_unique_tag = (
801 self._entries[entry.id_string].server_defined_unique_tag)
803 def _CheckVersionForCommit(self, entry):
804 """Perform an optimistic concurrency check on the version number.
806 Clients are only allowed to commit if they report having seen the most
807 recent version of an object.
810 entry: A sync entity from the client. It is assumed that ID fields
811 have been converted to server IDs.
813 A boolean value indicating whether the client's version matches the
814 newest server version for the given entry.
816 if entry.id_string in self._entries:
817 # Allow edits/deletes if the version matches, and any undeletion.
818 return (self._entries[entry.id_string].version == entry.version or
819 self._entries[entry.id_string].deleted)
821 # Allow unknown ID only if the client thinks it's new too.
822 return entry.version == 0
824 def _CheckParentIdForCommit(self, entry):
825 """Check that the parent ID referenced in a SyncEntity actually exists.
828 entry: A sync entity from the client. It is assumed that ID fields
829 have been converted to server IDs.
831 A boolean value indicating whether the entity's parent ID is an object
832 that actually exists (and is not deleted) in the current account state.
834 if entry.parent_id_string == ROOT_ID:
835 # This is generally allowed.
837 if entry.parent_id_string not in self._entries:
838 print 'Warning: Client sent unknown ID. Should never happen.'
840 if entry.parent_id_string == entry.id_string:
841 print 'Warning: Client sent circular reference. Should never happen.'
843 if self._entries[entry.parent_id_string].deleted:
844 # This can happen in a race condition between two clients.
846 if not self._entries[entry.parent_id_string].folder:
847 print 'Warning: Client sent non-folder parent. Should never happen.'
851 def _RewriteIdsAsServerIds(self, entry, cache_guid, commit_session):
852 """Convert ID fields in a client sync entry to server IDs.
854 A commit batch sent by a client may contain new items for which the
855 server has not generated IDs yet. And within a commit batch, later
856 items are allowed to refer to earlier items. This method will
857 generate server IDs for new items, as well as rewrite references
858 to items whose server IDs were generated earlier in the batch.
861 entry: The client sync entry to modify.
862 cache_guid: The globally unique ID of the client that sent this
864 commit_session: A dictionary mapping the original IDs to the new server
865 IDs, for any items committed earlier in the batch.
867 if entry.version == 0:
868 data_type = GetEntryType(entry)
869 if entry.HasField('client_defined_unique_tag'):
870 # When present, this should determine the item's ID.
871 new_id = self._ClientTagToId(data_type, entry.client_defined_unique_tag)
873 new_id = self._ClientIdToId(data_type, cache_guid, entry.id_string)
874 entry.originator_cache_guid = cache_guid
875 entry.originator_client_item_id = entry.id_string
876 commit_session[entry.id_string] = new_id # Remember the remapping.
877 entry.id_string = new_id
878 if entry.parent_id_string in commit_session:
879 entry.parent_id_string = commit_session[entry.parent_id_string]
880 if entry.insert_after_item_id in commit_session:
881 entry.insert_after_item_id = commit_session[entry.insert_after_item_id]
883 def ValidateCommitEntries(self, entries):
884 """Raise an exception if a commit batch contains any global errors.
887 entries: an iterable containing commit-form SyncEntity protocol buffers.
890 MigrationDoneError: if any of the entries reference a recently-migrated
893 server_ids_in_commit = set()
894 local_ids_in_commit = set()
895 for entry in entries:
897 server_ids_in_commit.add(entry.id_string)
899 local_ids_in_commit.add(entry.id_string)
900 if entry.HasField('parent_id_string'):
901 if entry.parent_id_string not in local_ids_in_commit:
902 server_ids_in_commit.add(entry.parent_id_string)
904 versions_present = {}
905 for server_id in server_ids_in_commit:
906 parsed = self._ExtractIdInfo(server_id)
908 datatype, version, _ = parsed
909 versions_present.setdefault(datatype, []).append(version)
911 self.migration_history.CheckAllCurrent(
912 dict((k, min(v)) for k, v in versions_present.iteritems()))
914 def CommitEntry(self, entry, cache_guid, commit_session):
915 """Attempt to commit one entry to the user's account.
918 entry: A SyncEntity protobuf representing desired object changes.
919 cache_guid: A string value uniquely identifying the client; this
920 is used for ID generation and will determine the originator_cache_guid
922 commit_session: A dictionary mapping client IDs to server IDs for any
923 objects committed earlier this session. If the entry gets a new ID
924 during commit, the change will be recorded here.
926 A SyncEntity reflecting the post-commit value of the entry, or None
927 if the entry was not committed due to an error.
929 entry = copy.deepcopy(entry)
931 # Generate server IDs for this entry, and write generated server IDs
932 # from earlier entries into the message's fields, as appropriate. The
933 # ID generation state is stored in 'commit_session'.
934 self._RewriteIdsAsServerIds(entry, cache_guid, commit_session)
936 # Perform the optimistic concurrency check on the entry's version number.
937 # Clients are not allowed to commit unless they indicate that they've seen
938 # the most recent version of an object.
939 if not self._CheckVersionForCommit(entry):
942 # Check the validity of the parent ID; it must exist at this point.
943 # TODO(nick): Implement cycle detection and resolution.
944 if not self._CheckParentIdForCommit(entry):
947 self._CopyOverImmutableFields(entry);
949 # At this point, the commit is definitely going to happen.
951 # Deletion works by storing a limited record for an entry, called a
952 # tombstone. A sync server must track deleted IDs forever, since it does
953 # not keep track of client knowledge (there's no deletion ACK event).
955 def MakeTombstone(id_string, datatype):
956 """Make a tombstone entry that will replace the entry being deleted.
959 id_string: Index of the SyncEntity to be deleted.
961 A new SyncEntity reflecting the fact that the entry is deleted.
963 # Only the ID, version and deletion state are preserved on a tombstone.
964 tombstone = sync_pb2.SyncEntity()
965 tombstone.id_string = id_string
966 tombstone.deleted = True
968 tombstone.specifics.CopyFrom(GetDefaultEntitySpecifics(datatype))
971 def IsChild(child_id):
972 """Check if a SyncEntity is a child of entry, or any of its children.
975 child_id: Index of the SyncEntity that is a possible child of entry.
977 True if it is a child; false otherwise.
979 if child_id not in self._entries:
981 if self._entries[child_id].parent_id_string == entry.id_string:
983 return IsChild(self._entries[child_id].parent_id_string)
985 # Identify any children entry might have.
986 child_ids = [child.id_string for child in self._entries.itervalues()
987 if IsChild(child.id_string)]
989 # Mark all children that were identified as deleted.
990 for child_id in child_ids:
991 datatype = GetEntryType(self._entries[child_id])
992 self._SaveEntry(MakeTombstone(child_id, datatype))
994 # Delete entry itself.
995 datatype = GetEntryType(self._entries[entry.id_string])
996 entry = MakeTombstone(entry.id_string, datatype)
998 # Comments in sync.proto detail how the representation of positional
1001 # We've almost fully deprecated the 'insert_after_item_id' field.
1002 # The 'position_in_parent' field is also deprecated, but as of Jan 2013
1003 # is still in common use. The 'unique_position' field is the latest
1004 # and greatest in positioning technology.
1006 # This server supports 'position_in_parent' and 'unique_position'.
1007 self._WritePosition(entry, entry.parent_id_string)
1009 # Preserve the originator info, which the client is not required to send
1011 base_entry = self._entries.get(entry.id_string)
1012 if base_entry and not entry.HasField('originator_cache_guid'):
1013 entry.originator_cache_guid = base_entry.originator_cache_guid
1014 entry.originator_client_item_id = base_entry.originator_client_item_id
1016 # Store the current time since the Unix epoch in milliseconds.
1017 entry.mtime = (int((time.mktime(time.gmtime()) -
1018 (time.mktime(FIRST_DAY_UNIX_TIME_EPOCH) - ONE_DAY_SECONDS))*1000))
1020 # Commit the change. This also updates the version number.
1021 self._SaveEntry(entry)
1024 def _RewriteVersionInId(self, id_string):
1025 """Rewrites an ID so that its migration version becomes current."""
1026 parsed_id = self._ExtractIdInfo(id_string)
1029 datatype, old_migration_version, inner_id = parsed_id
1030 return self._MakeCurrentId(datatype, inner_id)
1032 def TriggerMigration(self, datatypes):
1033 """Cause a migration to occur for a set of datatypes on this account.
1035 Clients will see the MIGRATION_DONE error for these datatypes until they
1038 versions_to_remap = self.migration_history.Bump(datatypes)
1039 all_entries = self._entries.values()
1040 self._entries.clear()
1041 for entry in all_entries:
1042 new_id = self._RewriteVersionInId(entry.id_string)
1043 entry.id_string = new_id
1044 if entry.HasField('parent_id_string'):
1045 entry.parent_id_string = self._RewriteVersionInId(
1046 entry.parent_id_string)
1047 self._entries[entry.id_string] = entry
1049 def TriggerSyncTabFavicons(self):
1050 """Set the 'sync_tab_favicons' field to this account's nigori node.
1052 If the field is not currently set, will write a new nigori node entry
1053 with the field set. Else does nothing.
1056 nigori_tag = "google_chrome_nigori"
1057 nigori_original = self._entries.get(self._ServerTagToId(nigori_tag))
1058 if (nigori_original.specifics.nigori.sync_tab_favicons):
1060 nigori_new = copy.deepcopy(nigori_original)
1061 nigori_new.specifics.nigori.sync_tabs = True
1062 self._SaveEntry(nigori_new)
1064 def TriggerCreateSyncedBookmarks(self):
1065 """Create the Synced Bookmarks folder under the Bookmarks permanent item.
1067 Clients will then receive the Synced Bookmarks folder on future
1068 GetUpdates, and new bookmarks can be added within the Synced Bookmarks
1072 synced_bookmarks_spec, = [spec for spec in self._PERMANENT_ITEM_SPECS
1073 if spec.name == "Synced Bookmarks"]
1074 self._CreatePermanentItem(synced_bookmarks_spec)
1076 def TriggerEnableKeystoreEncryption(self):
1077 """Create the keystore_encryption experiment entity and enable it.
1079 A new entity within the EXPERIMENTS datatype is created with the unique
1080 client tag "keystore_encryption" if it doesn't already exist. The
1081 keystore_encryption message is then filled with |enabled| set to true.
1084 experiment_id = self._ServerTagToId("google_chrome_experiments")
1085 keystore_encryption_id = self._ClientTagToId(
1087 KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
1088 keystore_entry = self._entries.get(keystore_encryption_id)
1089 if keystore_entry is None:
1090 keystore_entry = sync_pb2.SyncEntity()
1091 keystore_entry.id_string = keystore_encryption_id
1092 keystore_entry.name = "Keystore Encryption"
1093 keystore_entry.client_defined_unique_tag = (
1094 KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
1095 keystore_entry.folder = False
1096 keystore_entry.deleted = False
1097 keystore_entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
1098 self._WritePosition(keystore_entry, experiment_id)
1100 keystore_entry.specifics.experiments.keystore_encryption.enabled = True
1102 self._SaveEntry(keystore_entry)
1104 def TriggerRotateKeystoreKeys(self):
1105 """Rotate the current set of keystore encryption keys.
1107 |self._keys| will have a new random encryption key appended to it. We touch
1108 the nigori node so that each client will receive the new encryption keys
1112 # Add a new encryption key.
1113 self._keys += [MakeNewKeystoreKey(), ]
1115 # Increment the nigori node's timestamp, so clients will get the new keys
1116 # on their next GetUpdates (any time the nigori node is sent back, we also
1117 # send back the keystore keys).
1118 nigori_tag = "google_chrome_nigori"
1119 self._SaveEntry(self._entries.get(self._ServerTagToId(nigori_tag)))
1121 def TriggerAcknowledgeManagedUsers(self):
1122 """Set the "acknowledged" flag for any managed user entities that don't have
1126 if not self.acknowledge_managed_users:
1129 managed_users = [copy.deepcopy(entry) for entry in self._entries.values()
1130 if entry.specifics.HasField('managed_user')
1131 and not entry.specifics.managed_user.acknowledged]
1132 for user in managed_users:
1133 user.specifics.managed_user.acknowledged = True
1134 self._SaveEntry(user)
1136 def TriggerEnablePreCommitGetUpdateAvoidance(self):
1137 """Sets the experiment to enable pre-commit GetUpdate avoidance."""
1138 experiment_id = self._ServerTagToId("google_chrome_experiments")
1139 pre_commit_gu_avoidance_id = self._ClientTagToId(
1141 PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG)
1142 entry = self._entries.get(pre_commit_gu_avoidance_id)
1144 entry = sync_pb2.SyncEntity()
1145 entry.id_string = pre_commit_gu_avoidance_id
1146 entry.name = "Pre-commit GU avoidance"
1147 entry.client_defined_unique_tag = PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG
1148 entry.folder = False
1149 entry.deleted = False
1150 entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
1151 self._WritePosition(entry, experiment_id)
1152 entry.specifics.experiments.pre_commit_update_avoidance.enabled = True
1153 self._SaveEntry(entry)
1155 def SetInducedError(self, error, error_frequency,
1156 sync_count_before_errors):
1157 self.induced_error = error
1158 self.induced_error_frequency = error_frequency
1159 self.sync_count_before_errors = sync_count_before_errors
1161 def GetInducedError(self):
1162 return self.induced_error
1164 def AddSyncedNotification(self, serialized_notification):
1165 """Adds a synced notification to the server data.
1167 The notification will be delivered to the client on the next GetUpdates
1171 serialized_notification: A serialized CoalescedSyncedNotification.
1174 The string representation of the added SyncEntity.
1177 ClientNotConnectedError: if the client has not yet connected to this
1180 # A unique string used wherever a unique ID for this notification is
1182 unique_notification_id = str(uuid.uuid4())
1184 specifics = self._CreateSyncedNotificationEntitySpecifics(
1185 unique_notification_id, serialized_notification)
1187 # Create the root SyncEntity representing a single notification.
1188 entity = sync_pb2.SyncEntity()
1189 entity.specifics.CopyFrom(specifics)
1190 entity.parent_id_string = self._ServerTagToId(
1191 'google_chrome_synced_notifications')
1192 entity.name = 'Synced notification added for testing'
1193 entity.server_defined_unique_tag = unique_notification_id
1195 # Set the version to one more than the greatest version number already seen.
1196 entries = sorted(self._entries.values(), key=operator.attrgetter('version'))
1197 if len(entries) < 1:
1198 raise ClientNotConnectedError
1199 entity.version = entries[-1].version + 1
1201 entity.client_defined_unique_tag = self._CreateSyncedNotificationClientTag(
1202 specifics.synced_notification.coalesced_notification.key)
1203 entity.id_string = self._ClientTagToId(GetEntryType(entity),
1204 entity.client_defined_unique_tag)
1206 self._entries[entity.id_string] = copy.deepcopy(entity)
1208 return google.protobuf.text_format.MessageToString(entity)
1210 def _CreateSyncedNotificationEntitySpecifics(self, unique_id,
1211 serialized_notification):
1212 """Create the EntitySpecifics proto for a synced notification."""
1213 coalesced = synced_notification_data_pb2.CoalescedSyncedNotification()
1214 google.protobuf.text_format.Merge(serialized_notification, coalesced)
1216 # Override the provided key so that we have a unique one.
1217 coalesced.key = unique_id
1219 specifics = sync_pb2.EntitySpecifics()
1220 notification_specifics = \
1221 synced_notification_specifics_pb2.SyncedNotificationSpecifics()
1222 notification_specifics.coalesced_notification.CopyFrom(coalesced)
1223 specifics.synced_notification.CopyFrom(notification_specifics)
1228 def _CreateSyncedNotificationClientTag(self, key):
1229 """Create the client_defined_unique_tag value for a SyncedNotification.
1232 key: The entity used to create the client tag.
1235 The string value of the to be used as the client_defined_unique_tag.
1237 serialized_type = sync_pb2.EntitySpecifics()
1238 specifics = synced_notification_specifics_pb2.SyncedNotificationSpecifics()
1239 serialized_type.synced_notification.CopyFrom(specifics)
1240 hash_input = serialized_type.SerializeToString() + key
1241 return base64.b64encode(hashlib.sha1(hash_input).digest())
class TestServer(object):
  """An object to handle requests for one (and only one) Chrome Sync account.

  TestServer consumes the sync command messages that are the outermost
  layers of the protocol, performs the corresponding actions on its
  SyncDataModel, and constructs an appropriate response message.
  """

  def __init__(self):
    # The implementation supports exactly one account; its state is here.
    self.account = SyncDataModel()
    self.account_lock = threading.Lock()
    # Clients that have talked to us: a map from the full client ID
    # to its nickname. Read by GetShortClientName.
    self.clients = {}
    self.client_name_generator = ('+' * times + chr(c)
        for times in xrange(0, sys.maxint) for c in xrange(ord('A'), ord('Z')))
    self.transient_error = False
    # Number of sync requests handled so far; read by HandleCommand and
    # CheckSendError.
    self.sync_count = 0
    # Gaia OAuth2 Token fields and their default values.
    self.response_code = 200
    self.request_token = 'rt1'
    self.access_token = 'at1'
    self.expires_in = 3600
    self.token_type = 'Bearer'
    # The ClientCommand to send back on each ServerToClientResponse. If set to
    # None, no ClientCommand should be sent.
    self._client_command = None
1274 def GetShortClientName(self, query):
1275 parsed = cgi.parse_qs(query[query.find('?')+1:])
1276 client_id = parsed.get('client_id')
1279 client_id = client_id[0]
1280 if client_id not in self.clients:
1281 self.clients[client_id] = self.client_name_generator.next()
1282 return self.clients[client_id]
1284 def CheckStoreBirthday(self, request):
1285 """Raises StoreBirthdayError if the request's birthday is a mismatch."""
1286 if not request.HasField('store_birthday'):
1288 if self.account.StoreBirthday() != request.store_birthday:
1289 raise StoreBirthdayError
1291 def CheckTransientError(self):
1292 """Raises TransientError if transient_error variable is set."""
1293 if self.transient_error:
1294 raise TransientError
1296 def CheckSendError(self):
1297 """Raises SyncInducedError if needed."""
1298 if (self.account.induced_error.error_type !=
1299 sync_enums_pb2.SyncEnums.UNKNOWN):
1300 # Always means return the given error for all requests.
1301 if self.account.induced_error_frequency == ERROR_FREQUENCY_ALWAYS:
1302 raise SyncInducedError
1303 # This means the FIRST 2 requests of every 3 requests
1304 # return an error. Don't switch the order of failures. There are
1305 # test cases that rely on the first 2 being the failure rather than
1307 elif (self.account.induced_error_frequency ==
1308 ERROR_FREQUENCY_TWO_THIRDS):
1309 if (((self.sync_count -
1310 self.account.sync_count_before_errors) % 3) != 0):
1311 raise SyncInducedError
1313 raise InducedErrorFrequencyNotDefined
1315 def HandleMigrate(self, path):
1316 query = urlparse.urlparse(path)[4]
1318 self.account_lock.acquire()
1320 datatypes = [DataTypeStringToSyncTypeLoose(x)
1321 for x in urlparse.parse_qs(query).get('type',[])]
1323 self.account.TriggerMigration(datatypes)
1324 response = 'Migrated datatypes %s' % (
1325 ' and '.join(SyncTypeToString(x).upper() for x in datatypes))
1327 response = 'Please specify one or more <i>type=name</i> parameters'
1329 except DataTypeIdNotRecognized, error:
1330 response = 'Could not interpret datatype name'
1333 self.account_lock.release()
1334 return (code, '<html><title>Migration: %d</title><H1>%d %s</H1></html>' %
1335 (code, code, response))
1337 def HandleSetInducedError(self, path):
1338 query = urlparse.urlparse(path)[4]
1339 self.account_lock.acquire()
1341 response = 'Success'
1342 error = sync_pb2.ClientToServerResponse.Error()
1344 error_type = urlparse.parse_qs(query)['error']
1345 action = urlparse.parse_qs(query)['action']
1346 error.error_type = int(error_type[0])
1347 error.action = int(action[0])
1349 error.url = (urlparse.parse_qs(query)['url'])[0]
1353 error.error_description =(
1354 (urlparse.parse_qs(query)['error_description'])[0])
1356 error.error_description = ''
1358 error_frequency = int((urlparse.parse_qs(query)['frequency'])[0])
1360 error_frequency = ERROR_FREQUENCY_ALWAYS
1361 self.account.SetInducedError(error, error_frequency, self.sync_count)
1362 response = ('Error = %d, action = %d, url = %s, description = %s' %
1363 (error.error_type, error.action,
1365 error.error_description))
1367 response = 'Could not parse url'
1370 self.account_lock.release()
1371 return (code, '<html><title>SetError: %d</title><H1>%d %s</H1></html>' %
1372 (code, code, response))
1374 def HandleCreateBirthdayError(self):
1375 self.account.ResetStoreBirthday()
1378 '<html><title>Birthday error</title><H1>Birthday error</H1></html>')
1380 def HandleSetTransientError(self):
1381 self.transient_error = True
1384 '<html><title>Transient error</title><H1>Transient error</H1></html>')
1386 def HandleSetSyncTabFavicons(self):
1387 """Set 'sync_tab_favicons' field of the nigori node for this account."""
1388 self.account.TriggerSyncTabFavicons()
1391 '<html><title>Tab Favicons</title><H1>Tab Favicons</H1></html>')
1393 def HandleCreateSyncedBookmarks(self):
1394 """Create the Synced Bookmarks folder under Bookmarks."""
1395 self.account.TriggerCreateSyncedBookmarks()
1398 '<html><title>Synced Bookmarks</title><H1>Synced Bookmarks</H1></html>')
1400 def HandleEnableKeystoreEncryption(self):
1401 """Enables the keystore encryption experiment."""
1402 self.account.TriggerEnableKeystoreEncryption()
1405 '<html><title>Enable Keystore Encryption</title>'
1406 '<H1>Enable Keystore Encryption</H1></html>')
1408 def HandleRotateKeystoreKeys(self):
1409 """Rotate the keystore encryption keys."""
1410 self.account.TriggerRotateKeystoreKeys()
1413 '<html><title>Rotate Keystore Keys</title>'
1414 '<H1>Rotate Keystore Keys</H1></html>')
1416 def HandleEnableManagedUserAcknowledgement(self):
1417 """Enable acknowledging newly created managed users."""
1418 self.account.acknowledge_managed_users = True
1421 '<html><title>Enable Managed User Acknowledgement</title>'
1422 '<h1>Enable Managed User Acknowledgement</h1></html>')
1424 def HandleEnablePreCommitGetUpdateAvoidance(self):
1425 """Enables the pre-commit GU avoidance experiment."""
1426 self.account.TriggerEnablePreCommitGetUpdateAvoidance()
1429 '<html><title>Enable pre-commit GU avoidance</title>'
1430 '<H1>Enable pre-commit GU avoidance</H1></html>')
1432 def HandleCommand(self, query, raw_request):
1433 """Decode and handle a sync command from a raw input of bytes.
1435 This is the main entry point for this class. It is safe to call this
1436 method from multiple threads.
1439 raw_request: An iterable byte sequence to be interpreted as a sync
1442 A tuple (response_code, raw_response); the first value is an HTTP
1443 result code, while the second value is a string of bytes which is the
1444 serialized reply to the command.
1446 self.account_lock.acquire()
1447 self.sync_count += 1
1448 def print_context(direction):
1449 print '[Client %s %s %s.py]' % (self.GetShortClientName(query), direction,
1453 request = sync_pb2.ClientToServerMessage()
1454 request.MergeFromString(raw_request)
1455 contents = request.message_contents
1457 response = sync_pb2.ClientToServerResponse()
1458 response.error_code = sync_enums_pb2.SyncEnums.SUCCESS
1460 if self._client_command:
1461 response.client_command.CopyFrom(self._client_command)
1463 self.CheckStoreBirthday(request)
1464 response.store_birthday = self.account.store_birthday
1465 self.CheckTransientError()
1466 self.CheckSendError()
1470 if contents == sync_pb2.ClientToServerMessage.AUTHENTICATE:
1471 print 'Authenticate'
1472 # We accept any authentication token, and support only one account.
1473 # TODO(nick): Mock out the GAIA authentication as well; hook up here.
1474 response.authenticate.user.email = 'syncjuser@chromium'
1475 response.authenticate.user.display_name = 'Sync J User'
1476 elif contents == sync_pb2.ClientToServerMessage.COMMIT:
1477 print 'Commit %d item(s)' % len(request.commit.entries)
1478 self.HandleCommit(request.commit, response.commit)
1479 elif contents == sync_pb2.ClientToServerMessage.GET_UPDATES:
1481 self.HandleGetUpdates(request.get_updates, response.get_updates)
1483 print '%d update(s)' % len(response.get_updates.entries)
1485 print 'Unrecognizable sync request!'
1486 return (400, None) # Bad request.
1487 return (200, response.SerializeToString())
1488 except MigrationDoneError, error:
1490 print 'MIGRATION_DONE: <%s>' % (ShortDatatypeListSummary(error.datatypes))
1491 response = sync_pb2.ClientToServerResponse()
1492 response.store_birthday = self.account.store_birthday
1493 response.error_code = sync_enums_pb2.SyncEnums.MIGRATION_DONE
1494 response.migrated_data_type_id[:] = [
1495 SyncTypeToProtocolDataTypeId(x) for x in error.datatypes]
1496 return (200, response.SerializeToString())
1497 except StoreBirthdayError, error:
1499 print 'NOT_MY_BIRTHDAY'
1500 response = sync_pb2.ClientToServerResponse()
1501 response.store_birthday = self.account.store_birthday
1502 response.error_code = sync_enums_pb2.SyncEnums.NOT_MY_BIRTHDAY
1503 return (200, response.SerializeToString())
1504 except TransientError, error:
1505 ### This is deprecated now. Would be removed once test cases are removed.
1507 print 'TRANSIENT_ERROR'
1508 response.store_birthday = self.account.store_birthday
1509 response.error_code = sync_enums_pb2.SyncEnums.TRANSIENT_ERROR
1510 return (200, response.SerializeToString())
1511 except SyncInducedError, error:
1513 print 'INDUCED_ERROR'
1514 response.store_birthday = self.account.store_birthday
1515 error = self.account.GetInducedError()
1516 response.error.error_type = error.error_type
1517 response.error.url = error.url
1518 response.error.error_description = error.error_description
1519 response.error.action = error.action
1520 return (200, response.SerializeToString())
1522 self.account_lock.release()
1524 def HandleCommit(self, commit_message, commit_response):
1525 """Respond to a Commit request by updating the user's account state.
1527 Commit attempts stop after the first error, returning a CONFLICT result
1528 for any unattempted entries.
1531 commit_message: A sync_pb.CommitMessage protobuf holding the content
1532 of the client's request.
1533 commit_response: A sync_pb.CommitResponse protobuf into which a reply
1534 to the client request will be written.
1536 commit_response.SetInParent()
1537 batch_failure = False
1538 session = {} # Tracks ID renaming during the commit operation.
1539 guid = commit_message.cache_guid
1541 self.account.ValidateCommitEntries(commit_message.entries)
1543 for entry in commit_message.entries:
1545 if not batch_failure:
1546 # Try to commit the change to the account.
1547 server_entry = self.account.CommitEntry(entry, guid, session)
1549 # An entryresponse is returned in both success and failure cases.
1550 reply = commit_response.entryresponse.add()
1551 if not server_entry:
1552 reply.response_type = sync_pb2.CommitResponse.CONFLICT
1553 reply.error_message = 'Conflict.'
1554 batch_failure = True # One failure halts the batch.
1556 reply.response_type = sync_pb2.CommitResponse.SUCCESS
1557 # These are the properties that the server is allowed to override
1558 # during commit; the client wants to know their values at the end
1560 reply.id_string = server_entry.id_string
1561 if not server_entry.deleted:
1562 # Note: the production server doesn't actually send the
1563 # parent_id_string on commit responses, so we don't either.
1564 reply.position_in_parent = server_entry.position_in_parent
1565 reply.version = server_entry.version
1566 reply.name = server_entry.name
1567 reply.non_unique_name = server_entry.non_unique_name
1569 reply.version = entry.version + 1
1571 def HandleGetUpdates(self, update_request, update_response):
1572 """Respond to a GetUpdates request by querying the user's account.
1575 update_request: A sync_pb.GetUpdatesMessage protobuf holding the content
1576 of the client's request.
1577 update_response: A sync_pb.GetUpdatesResponse protobuf into which a reply
1578 to the client request will be written.
1580 update_response.SetInParent()
1581 update_sieve = UpdateSieve(update_request, self.account.migration_history)
1583 print CallerInfoToString(update_request.caller_info.source),
1584 print update_sieve.SummarizeRequest()
1586 update_sieve.CheckMigrationState()
1588 new_timestamp, entries, remaining = self.account.GetChanges(update_sieve)
1590 update_response.changes_remaining = remaining
1591 sending_nigori_node = False
1592 for entry in entries:
1593 if entry.name == 'Nigori':
1594 sending_nigori_node = True
1595 reply = update_response.entries.add()
1596 reply.CopyFrom(entry)
1597 update_sieve.SaveProgress(new_timestamp, update_response)
1599 if update_request.need_encryption_key or sending_nigori_node:
1600 update_response.encryption_keys.extend(self.account.GetKeystoreKeys())
1602 def HandleGetOauth2Token(self):
1603 return (int(self.response_code),
1605 ' \"refresh_token\": \"' + self.request_token + '\",\n'
1606 ' \"access_token\": \"' + self.access_token + '\",\n'
1607 ' \"expires_in\": ' + str(self.expires_in) + ',\n'
1608 ' \"token_type\": \"' + self.token_type +'\"\n'
1611 def HandleSetOauth2Token(self, response_code, request_token, access_token,
1612 expires_in, token_type):
1613 if response_code != 0:
1614 self.response_code = response_code
1615 if request_token != '':
1616 self.request_token = request_token
1617 if access_token != '':
1618 self.access_token = access_token
1620 self.expires_in = expires_in
1621 if token_type != '':
1622 self.token_type = token_type
1625 '<html><title>Set OAuth2 Token</title>'
1626 '<H1>This server will now return the OAuth2 Token:</H1>'
1627 '<p>response_code: ' + str(self.response_code) + '</p>'
1628 '<p>request_token: ' + self.request_token + '</p>'
1629 '<p>access_token: ' + self.access_token + '</p>'
1630 '<p>expires_in: ' + str(self.expires_in) + '</p>'
1631 '<p>token_type: ' + self.token_type + '</p>'
1634 def CustomizeClientCommand(self, sessions_commit_delay_seconds):
1635 """Customizes the value of the ClientCommand of ServerToClientResponse.
1637 Currently, this only allows for changing the sessions_commit_delay_seconds
1638 field. This is useful for testing in conjunction with
1639 AddSyncedNotification so that synced notifications are seen immediately
1640 after triggering them with an HTTP call to the test server.
1643 sessions_commit_delay_seconds: The desired sync delay time for sessions.
1645 if not self._client_command:
1646 self._client_command = client_commands_pb2.ClientCommand()
1648 self._client_command.sessions_commit_delay_seconds = \
1649 sessions_commit_delay_seconds
1650 return self._client_command