# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""An implementation of the server side of the Chromium sync protocol.

The details of the protocol are described mostly by comments in the protocol
buffer definition at chrome/browser/sync/protocol/sync.proto.
"""

import base64
import cgi
import copy
import google.protobuf.text_format
import hashlib
import operator
import pickle
import random
import string
import sys
import threading
import time
import urlparse
import uuid

import app_list_specifics_pb2
import app_notification_specifics_pb2
import app_setting_specifics_pb2
import app_specifics_pb2
import article_specifics_pb2
import autofill_specifics_pb2
import bookmark_specifics_pb2
import client_commands_pb2
import dictionary_specifics_pb2
import get_updates_caller_info_pb2
import extension_setting_specifics_pb2
import extension_specifics_pb2
import favicon_image_specifics_pb2
import favicon_tracking_specifics_pb2
import history_delete_directive_specifics_pb2
import managed_user_setting_specifics_pb2
import managed_user_specifics_pb2
import nigori_specifics_pb2
import password_specifics_pb2
import preference_specifics_pb2
import priority_preference_specifics_pb2
import search_engine_specifics_pb2
import session_specifics_pb2
import sync_pb2
import sync_enums_pb2
import synced_notification_data_pb2
import synced_notification_render_pb2
import synced_notification_specifics_pb2
import theme_specifics_pb2
import typed_url_specifics_pb2

# An enumeration of the various kinds of data that can be synced.
# Over the wire, this enumeration is not used: a sync object's type is
# inferred by which EntitySpecifics field it has.  But in the context
# of a program, it is useful to have an enumeration.
ALL_TYPES = (
    TOP_LEVEL,  # The type of the 'Google Chrome' folder.
    APPS,
    APP_LIST,
    APP_NOTIFICATION,
    APP_SETTINGS,
    ARTICLE,
    AUTOFILL,
    AUTOFILL_PROFILE,
    BOOKMARK,
    DEVICE_INFO,
    DICTIONARY,
    EXPERIMENTS,
    EXTENSIONS,
    HISTORY_DELETE_DIRECTIVE,
    MANAGED_USER_SETTING,
    MANAGED_USER,
    NIGORI,
    PASSWORD,
    PREFERENCE,
    PRIORITY_PREFERENCE,
    SEARCH_ENGINE,
    SESSION,
    SYNCED_NOTIFICATION,
    THEME,
    TYPED_URL,
    EXTENSION_SETTINGS,
    FAVICON_IMAGES,
    FAVICON_TRACKING) = range(28)
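
# Example (illustrative): the tuple unpacking above is the usual Python 2
# enum idiom; each name is bound to its index within ALL_TYPES.  Given the
# ordering above:
#
#   >>> TOP_LEVEL, BOOKMARK, FAVICON_TRACKING
#   (0, 8, 27)
#   >>> len(ALL_TYPES)
#   28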

# An enumeration of the frequency at which the server should send errors
# to the client. The frequency is specified by the URL that triggers the error.
# Note: This enum should be kept in the same order as the enum in sync_test.h.
SYNC_ERROR_FREQUENCY = (
    ERROR_FREQUENCY_NONE,
    ERROR_FREQUENCY_ALWAYS,
    ERROR_FREQUENCY_TWO_THIRDS) = range(3)

# Well-known server tag of the top level 'Google Chrome' folder.
TOP_LEVEL_FOLDER_TAG = 'google_chrome'

# Given a sync type from ALL_TYPES, find the FieldDescriptor corresponding
# to that datatype.  Note that TOP_LEVEL has no such field.
SYNC_TYPE_FIELDS = sync_pb2.EntitySpecifics.DESCRIPTOR.fields_by_name
SYNC_TYPE_TO_DESCRIPTOR = {
    APP_LIST: SYNC_TYPE_FIELDS['app_list'],
    APP_NOTIFICATION: SYNC_TYPE_FIELDS['app_notification'],
    APP_SETTINGS: SYNC_TYPE_FIELDS['app_setting'],
    APPS: SYNC_TYPE_FIELDS['app'],
    ARTICLE: SYNC_TYPE_FIELDS['article'],
    AUTOFILL: SYNC_TYPE_FIELDS['autofill'],
    AUTOFILL_PROFILE: SYNC_TYPE_FIELDS['autofill_profile'],
    BOOKMARK: SYNC_TYPE_FIELDS['bookmark'],
    DEVICE_INFO: SYNC_TYPE_FIELDS['device_info'],
    DICTIONARY: SYNC_TYPE_FIELDS['dictionary'],
    EXPERIMENTS: SYNC_TYPE_FIELDS['experiments'],
    EXTENSION_SETTINGS: SYNC_TYPE_FIELDS['extension_setting'],
    EXTENSIONS: SYNC_TYPE_FIELDS['extension'],
    FAVICON_IMAGES: SYNC_TYPE_FIELDS['favicon_image'],
    FAVICON_TRACKING: SYNC_TYPE_FIELDS['favicon_tracking'],
    HISTORY_DELETE_DIRECTIVE: SYNC_TYPE_FIELDS['history_delete_directive'],
    MANAGED_USER_SETTING: SYNC_TYPE_FIELDS['managed_user_setting'],
    MANAGED_USER: SYNC_TYPE_FIELDS['managed_user'],
    NIGORI: SYNC_TYPE_FIELDS['nigori'],
    PASSWORD: SYNC_TYPE_FIELDS['password'],
    PREFERENCE: SYNC_TYPE_FIELDS['preference'],
    PRIORITY_PREFERENCE: SYNC_TYPE_FIELDS['priority_preference'],
    SEARCH_ENGINE: SYNC_TYPE_FIELDS['search_engine'],
    SESSION: SYNC_TYPE_FIELDS['session'],
    SYNCED_NOTIFICATION: SYNC_TYPE_FIELDS['synced_notification'],
    THEME: SYNC_TYPE_FIELDS['theme'],
    TYPED_URL: SYNC_TYPE_FIELDS['typed_url'],
    }
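
# Example (illustrative): each FieldDescriptor carries both the
# EntitySpecifics field name and its field number; the number doubles as the
# protocol's data type id (see SyncTypeToProtocolDataTypeId below).
#
#   >>> SYNC_TYPE_TO_DESCRIPTOR[BOOKMARK].name
#   'bookmark'
#   >>> SYNC_TYPE_TO_DESCRIPTOR[BOOKMARK].number  # the protocol data type id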

# The parent ID used to indicate a top-level node.
ROOT_ID = '0'

# Unix time epoch +1 day in struct_time format. The tuple corresponds to
# UTC Thursday Jan 2 1970, 00:00:00, non-dst.
# We have to add one day after the start of the epoch, since in timezones
# with a positive UTC offset time.mktime throws an OverflowError rather than
# returning a negative number.
FIRST_DAY_UNIX_TIME_EPOCH = (1970, 1, 2, 0, 0, 0, 4, 2, 0)
ONE_DAY_SECONDS = 60 * 60 * 24

# The number of characters in the server-generated encryption key.
KEYSTORE_KEY_LENGTH = 16

# The hashed client tags for some experiment nodes.
KEYSTORE_ENCRYPTION_EXPERIMENT_TAG = "pis8ZRzh98/MKLtVEio2mr42LQA="
PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG = "Z1xgeh3QUBa50vdEPd8C/4c7jfE="


class Error(Exception):
  """Error class for this module."""


class ProtobufDataTypeFieldNotUnique(Error):
  """An entry should not have more than one data type present."""


class DataTypeIdNotRecognized(Error):
  """The requested data type is not recognized."""


class MigrationDoneError(Error):
  """A server-side migration occurred; clients must re-sync some datatypes.

  Attributes:
    datatypes: a list of the datatypes (python enum) needing migration.
  """

  def __init__(self, datatypes):
    self.datatypes = datatypes


class StoreBirthdayError(Error):
  """The client sent a birthday that doesn't correspond to this server."""


class TransientError(Error):
  """The client would be sent a transient error."""


class SyncInducedError(Error):
  """The client would be sent an error."""


class InducedErrorFrequencyNotDefined(Error):
  """The error frequency defined is not handled."""


class ClientNotConnectedError(Error):
  """The client is not connected to the server."""


def GetEntryType(entry):
  """Extract the sync type from a SyncEntity.

  Args:
    entry: A SyncEntity protobuf object whose type to determine.
  Returns:
    An enum value from ALL_TYPES if the entry's type can be determined, or None
    if the type cannot be determined.
  Raises:
    ProtobufDataTypeFieldNotUnique: More than one type was indicated by
      the entry.
  """
  if entry.server_defined_unique_tag == TOP_LEVEL_FOLDER_TAG:
    return TOP_LEVEL
  entry_types = GetEntryTypesFromSpecifics(entry.specifics)
  if not entry_types:
    return None

  # If there is more than one, either there's a bug, or else the caller
  # should use GetEntryTypesFromSpecifics.
  if len(entry_types) > 1:
    raise ProtobufDataTypeFieldNotUnique
  return entry_types[0]


def GetEntryTypesFromSpecifics(specifics):
  """Determine the sync types indicated by an EntitySpecifics's field(s).

  If the specifics have more than one recognized data type field (as commonly
  happens with the requested_types field of GetUpdatesMessage), all types
  will be returned.  Callers must handle the possibility of the returned
  value having more than one item.

  Args:
    specifics: An EntitySpecifics protobuf message whose fields to
      enumerate.
  Returns:
    A list of the sync types (values from ALL_TYPES) associated with each
    recognized field of the specifics message.
  """
  return [data_type for data_type, field_descriptor
          in SYNC_TYPE_TO_DESCRIPTOR.iteritems()
          if specifics.HasField(field_descriptor.name)]


def SyncTypeToProtocolDataTypeId(data_type):
  """Convert from a sync type (python enum) to the protocol's data type id."""
  return SYNC_TYPE_TO_DESCRIPTOR[data_type].number


def ProtocolDataTypeIdToSyncType(protocol_data_type_id):
  """Convert from the protocol's data type id to a sync type (python enum)."""
  for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if field_descriptor.number == protocol_data_type_id:
      return data_type
  raise DataTypeIdNotRecognized


def DataTypeStringToSyncTypeLoose(data_type_string):
  """Converts a human-readable string to a sync type (python enum).

  Capitalization and pluralization don't matter; this function is appropriate
  for values that might have been typed by a human being; e.g., command-line
  flags or query parameters.
  """
  if data_type_string.isdigit():
    return ProtocolDataTypeIdToSyncType(int(data_type_string))
  name = data_type_string.lower().rstrip('s')
  for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if field_descriptor.name.lower().rstrip('s') == name:
      return data_type
  raise DataTypeIdNotRecognized
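
# Example (illustrative): the loose parser accepts field names, plural forms,
# mixed case, or numeric protocol ids, so each of the following returns
# BOOKMARK:
#
#   >>> DataTypeStringToSyncTypeLoose('Bookmarks')
#   >>> DataTypeStringToSyncTypeLoose('bookmark')
#   >>> DataTypeStringToSyncTypeLoose(
#   ...     str(SyncTypeToProtocolDataTypeId(BOOKMARK)))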


def MakeNewKeystoreKey():
  """Returns a new random keystore key."""
  return ''.join(random.choice(string.ascii_uppercase + string.digits)
                 for x in xrange(KEYSTORE_KEY_LENGTH))


def SyncTypeToString(data_type):
  """Formats a sync type enum (from ALL_TYPES) to a human-readable string."""
  return SYNC_TYPE_TO_DESCRIPTOR[data_type].name


def CallerInfoToString(caller_info_source):
  """Formats a GetUpdatesSource enum value to a readable string."""
  return get_updates_caller_info_pb2.GetUpdatesCallerInfo \
      .DESCRIPTOR.enum_types_by_name['GetUpdatesSource'] \
      .values_by_number[caller_info_source].name


def ShortDatatypeListSummary(data_types):
  """Formats compactly a list of sync types (python enums) for human eyes.

  This function is intended for use by logging.  If the list of datatypes
  contains almost all of the values, the return value will be expressed
  in terms of the datatypes that aren't set.
  """
  included = set(data_types) - set([TOP_LEVEL])
  if not included:
    return 'nothing'
  excluded = set(ALL_TYPES) - included - set([TOP_LEVEL])
  if not excluded:
    return 'everything'
  simple_text = '+'.join(sorted([SyncTypeToString(x) for x in included]))
  all_but_text = 'all except %s' % (
      '+'.join(sorted([SyncTypeToString(x) for x in excluded])))
  if len(included) < len(excluded) or len(simple_text) <= len(all_but_text):
    return simple_text
  else:
    return all_but_text
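
# Example (illustrative): a small set is listed directly, while a mostly-full
# set is summarized by its complement:
#
#   >>> ShortDatatypeListSummary([BOOKMARK, PREFERENCE])
#   'bookmark+preference'
#   >>> ShortDatatypeListSummary(set(ALL_TYPES) - set([BOOKMARK]))
#   'all except bookmark'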


def GetDefaultEntitySpecifics(data_type):
  """Get an EntitySpecifics having a sync type's default field value."""
  specifics = sync_pb2.EntitySpecifics()
  if data_type in SYNC_TYPE_TO_DESCRIPTOR:
    descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type]
    getattr(specifics, descriptor.name).SetInParent()
  return specifics
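
# Example (illustrative): for BOOKMARK this yields an EntitySpecifics with an
# empty bookmark message present, which is how a sync entity declares its
# type over the wire:
#
#   >>> specifics = GetDefaultEntitySpecifics(BOOKMARK)
#   >>> specifics.HasField('bookmark')
#   True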


class PermanentItem(object):
  """A specification of one server-created permanent item.

  Attributes:
    tag: A known-to-the-client value that uniquely identifies a server-created
      permanent item.
    name: The human-readable display name for this item.
    parent_tag: The tag of the permanent item's parent.  If ROOT_ID, indicates
      a top-level item.  Otherwise, this must be the tag value of some other
      server-created permanent item.
    sync_type: A value from ALL_TYPES, giving the datatype of this permanent
      item.  This controls which types of client GetUpdates requests will
      cause the permanent item to be created and returned.
    create_by_default: Whether the permanent item is created at startup.
      This is True in the default case; non-default permanent items are
      created only when a client explicitly tells the server to do so.
  """

  def __init__(self, tag, name, parent_tag, sync_type, create_by_default=True):
    self.tag = tag
    self.name = name
    self.parent_tag = parent_tag
    self.sync_type = sync_type
    self.create_by_default = create_by_default


class MigrationHistory(object):
  """A record of the migration events associated with an account.

  Each migration event invalidates one or more datatypes on all clients
  that had synced the datatype before the event.  Such clients will continue
  to receive MigrationDone errors until they throw away their progress and
  re-sync that datatype from the beginning.
  """
  def __init__(self):
    self._migrations = {}
    for datatype in ALL_TYPES:
      self._migrations[datatype] = [1]
    self._next_migration_version = 2

  def GetLatestVersion(self, datatype):
    return self._migrations[datatype][-1]

  def CheckAllCurrent(self, versions_map):
    """Raises an error if any of the provided versions are out of date.

    This function intentionally reports migrations in the order that they were
    triggered.  Doing it this way allows the client to queue up two migrations
    in a row, so the second one is received while responding to the first.

    Arguments:
      versions_map: a map whose keys are datatypes and whose values are
        versions.

    Raises:
      MigrationDoneError: if a mismatch is found.
    """
    problems = {}
    for datatype, client_migration in versions_map.iteritems():
      for server_migration in self._migrations[datatype]:
        if client_migration < server_migration:
          problems.setdefault(server_migration, []).append(datatype)
    if problems:
      raise MigrationDoneError(problems[min(problems.keys())])

  def Bump(self, datatypes):
    """Add a record of a migration, to cause errors on future requests."""
    for datatype in datatypes:
      self._migrations[datatype].append(self._next_migration_version)
    self._next_migration_version += 1
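
  # Example (illustrative): bumping a datatype's migration version makes a
  # subsequent check for a client still on the old version fail.
  #
  #   >>> history = MigrationHistory()
  #   >>> history.GetLatestVersion(BOOKMARK)
  #   1
  #   >>> history.Bump([BOOKMARK])
  #   >>> history.CheckAllCurrent({BOOKMARK: 1})  # raises MigrationDoneError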


class UpdateSieve(object):
  """A filter to remove items the client has already seen."""
  def __init__(self, request, migration_history=None):
    self._original_request = request
    self._state = {}
    self._migration_history = migration_history or MigrationHistory()
    self._migration_versions_to_check = {}
    if request.from_progress_marker:
      for marker in request.from_progress_marker:
        data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
        if marker.HasField('timestamp_token_for_migration'):
          timestamp = marker.timestamp_token_for_migration
          if timestamp:
            self._migration_versions_to_check[data_type] = 1
        elif marker.token:
          (timestamp, version) = pickle.loads(marker.token)
          self._migration_versions_to_check[data_type] = version
        elif marker.HasField('token'):
          timestamp = 0
        else:
          raise ValueError('No timestamp information in progress marker.')
        self._state[data_type] = timestamp
    elif request.HasField('from_timestamp'):
      for data_type in GetEntryTypesFromSpecifics(request.requested_types):
        self._state[data_type] = request.from_timestamp
        self._migration_versions_to_check[data_type] = 1
    if self._state:
      self._state[TOP_LEVEL] = min(self._state.itervalues())

  def SummarizeRequest(self):
    timestamps = {}
    for data_type, timestamp in self._state.iteritems():
      if data_type == TOP_LEVEL:
        continue
      timestamps.setdefault(timestamp, []).append(data_type)
    return ', '.join('<%s>@%d' % (ShortDatatypeListSummary(types), stamp)
                     for stamp, types in sorted(timestamps.iteritems()))

  def CheckMigrationState(self):
    self._migration_history.CheckAllCurrent(self._migration_versions_to_check)

  def ClientWantsItem(self, item):
    """Return true if the client hasn't already seen an item."""
    return self._state.get(GetEntryType(item), sys.maxint) < item.version

  def HasAnyTimestamp(self):
    """Return true if at least one datatype was requested."""
    return bool(self._state)

  def GetMinTimestamp(self):
    """Return the smallest timestamp requested across all datatypes."""
    return min(self._state.itervalues())

  def GetFirstTimeTypes(self):
    """Return a list of datatypes requesting updates from timestamp zero."""
    return [datatype for datatype, timestamp in self._state.iteritems()
            if timestamp == 0]

  def GetCreateMobileBookmarks(self):
    """Return true if the client has requested creation of the
       'Mobile Bookmarks' folder.
    """
    return (self._original_request.HasField('create_mobile_bookmarks_folder')
            and self._original_request.create_mobile_bookmarks_folder)

  def SaveProgress(self, new_timestamp, get_updates_response):
    """Write the new_timestamp or new_progress_marker fields to a response."""
    if self._original_request.from_progress_marker:
      for data_type, old_timestamp in self._state.iteritems():
        if data_type == TOP_LEVEL:
          continue
        new_marker = sync_pb2.DataTypeProgressMarker()
        new_marker.data_type_id = SyncTypeToProtocolDataTypeId(data_type)
        final_stamp = max(old_timestamp, new_timestamp)
        final_migration = self._migration_history.GetLatestVersion(data_type)
        new_marker.token = pickle.dumps((final_stamp, final_migration))
        get_updates_response.new_progress_marker.add().MergeFrom(new_marker)
    elif self._original_request.HasField('from_timestamp'):
      if self._original_request.from_timestamp < new_timestamp:
        get_updates_response.new_timestamp = new_timestamp
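
  # Example (illustrative): the token this test server stores in each
  # progress marker is just a pickled (timestamp, migration_version) pair:
  #
  #   >>> pickle.loads(pickle.dumps((15, 1)))
  #   (15, 1)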


class SyncDataModel(object):
  """Models the account state of one sync user."""
  _BATCH_SIZE = 100

  # Specify all the permanent items that a model might need.
  _PERMANENT_ITEM_SPECS = [
      PermanentItem('google_chrome_apps', name='Apps',
                    parent_tag=ROOT_ID, sync_type=APPS),
      PermanentItem('google_chrome_app_list', name='App List',
                    parent_tag=ROOT_ID, sync_type=APP_LIST),
      PermanentItem('google_chrome_app_notifications', name='App Notifications',
                    parent_tag=ROOT_ID, sync_type=APP_NOTIFICATION),
      PermanentItem('google_chrome_app_settings',
                    name='App Settings',
                    parent_tag=ROOT_ID, sync_type=APP_SETTINGS),
      PermanentItem('google_chrome_bookmarks', name='Bookmarks',
                    parent_tag=ROOT_ID, sync_type=BOOKMARK),
      PermanentItem('bookmark_bar', name='Bookmark Bar',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
      PermanentItem('other_bookmarks', name='Other Bookmarks',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
      PermanentItem('synced_bookmarks', name='Synced Bookmarks',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK,
                    create_by_default=False),
      PermanentItem('google_chrome_autofill', name='Autofill',
                    parent_tag=ROOT_ID, sync_type=AUTOFILL),
      PermanentItem('google_chrome_autofill_profiles', name='Autofill Profiles',
                    parent_tag=ROOT_ID, sync_type=AUTOFILL_PROFILE),
      PermanentItem('google_chrome_device_info', name='Device Info',
                    parent_tag=ROOT_ID, sync_type=DEVICE_INFO),
      PermanentItem('google_chrome_experiments', name='Experiments',
                    parent_tag=ROOT_ID, sync_type=EXPERIMENTS),
      PermanentItem('google_chrome_extension_settings',
                    name='Extension Settings',
                    parent_tag=ROOT_ID, sync_type=EXTENSION_SETTINGS),
      PermanentItem('google_chrome_extensions', name='Extensions',
                    parent_tag=ROOT_ID, sync_type=EXTENSIONS),
      PermanentItem('google_chrome_history_delete_directives',
                    name='History Delete Directives',
                    parent_tag=ROOT_ID,
                    sync_type=HISTORY_DELETE_DIRECTIVE),
      PermanentItem('google_chrome_favicon_images',
                    name='Favicon Images',
                    parent_tag=ROOT_ID,
                    sync_type=FAVICON_IMAGES),
      PermanentItem('google_chrome_favicon_tracking',
                    name='Favicon Tracking',
                    parent_tag=ROOT_ID,
                    sync_type=FAVICON_TRACKING),
      PermanentItem('google_chrome_managed_user_settings',
                    name='Managed User Settings',
                    parent_tag=ROOT_ID, sync_type=MANAGED_USER_SETTING),
      PermanentItem('google_chrome_managed_users',
                    name='Managed Users',
                    parent_tag=ROOT_ID, sync_type=MANAGED_USER),
      PermanentItem('google_chrome_nigori', name='Nigori',
                    parent_tag=ROOT_ID, sync_type=NIGORI),
      PermanentItem('google_chrome_passwords', name='Passwords',
                    parent_tag=ROOT_ID, sync_type=PASSWORD),
      PermanentItem('google_chrome_preferences', name='Preferences',
                    parent_tag=ROOT_ID, sync_type=PREFERENCE),
      PermanentItem('google_chrome_priority_preferences',
                    name='Priority Preferences',
                    parent_tag=ROOT_ID, sync_type=PRIORITY_PREFERENCE),
      PermanentItem('google_chrome_synced_notifications',
                    name='Synced Notifications',
                    parent_tag=ROOT_ID, sync_type=SYNCED_NOTIFICATION),
      PermanentItem('google_chrome_search_engines', name='Search Engines',
                    parent_tag=ROOT_ID, sync_type=SEARCH_ENGINE),
      PermanentItem('google_chrome_sessions', name='Sessions',
                    parent_tag=ROOT_ID, sync_type=SESSION),
      PermanentItem('google_chrome_themes', name='Themes',
                    parent_tag=ROOT_ID, sync_type=THEME),
      PermanentItem('google_chrome_typed_urls', name='Typed URLs',
                    parent_tag=ROOT_ID, sync_type=TYPED_URL),
      PermanentItem('google_chrome_dictionary', name='Dictionary',
                    parent_tag=ROOT_ID, sync_type=DICTIONARY),
      PermanentItem('google_chrome_articles', name='Articles',
                    parent_tag=ROOT_ID, sync_type=ARTICLE),
      ]

  def __init__(self):
    # Monotonically increasing version number.  The next object change will
    # take on this value + 1.
    self._version = 0

    # The definitive copy of this client's items: a map from ID string to a
    # SyncEntity protocol buffer.
    self._entries = {}

    self.ResetStoreBirthday()
    self.migration_history = MigrationHistory()
    self.induced_error = sync_pb2.ClientToServerResponse.Error()
    self.induced_error_frequency = 0
    self.sync_count_before_errors = 0
    self.acknowledge_managed_users = False
    self._keys = [MakeNewKeystoreKey()]

  def _SaveEntry(self, entry):
    """Insert or update an entry in the change log, and give it a new version.

    The ID fields of this entry are assumed to be valid server IDs.  This
    entry will be updated with a new version number and sync_timestamp.

    Args:
      entry: The entry to be added or updated.
    """
    self._version += 1
    # Maintain a global (rather than per-item) sequence number and use it
    # both as the per-entry version as well as the update-progress timestamp.
    # This simulates the behavior of the original server implementation.
    entry.version = self._version
    entry.sync_timestamp = self._version

    # Preserve the originator info, which the client is not required to send
    # when updating.
    base_entry = self._entries.get(entry.id_string)
    if base_entry:
      entry.originator_cache_guid = base_entry.originator_cache_guid
      entry.originator_client_item_id = base_entry.originator_client_item_id

    self._entries[entry.id_string] = copy.deepcopy(entry)

  def _ServerTagToId(self, tag):
    """Determine the server ID from a server-unique tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      tag: The unique, known-to-the-client tag of a server-generated item.
    Returns:
      The string value of the computed server ID.
    """
    if not tag or tag == ROOT_ID:
      return tag
    spec = [x for x in self._PERMANENT_ITEM_SPECS if x.tag == tag][0]
    return self._MakeCurrentId(spec.sync_type, '<server tag>%s' % tag)

  def _ClientTagToId(self, datatype, tag):
    """Determine the server ID from a client-unique tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      datatype: The sync type (python enum) of the identified object.
      tag: The unique, opaque-to-the-server tag of a client-tagged item.
    Returns:
      The string value of the computed server ID.
    """
    return self._MakeCurrentId(datatype, '<client tag>%s' % tag)

  def _ClientIdToId(self, datatype, client_guid, client_item_id):
    """Compute a unique server ID from a client-local ID tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      datatype: The sync type (python enum) of the identified object.
      client_guid: A globally unique ID that identifies the client which
        created this item.
      client_item_id: An ID that uniquely identifies this item on the client
        which created it.
    Returns:
      The string value of the computed server ID.
    """
    # Using the client ID info is not required here (we could instead generate
    # a random ID), but it's useful for debugging.
    return self._MakeCurrentId(datatype,
        '<server ID originally>%s/%s' % (client_guid, client_item_id))

  def _MakeCurrentId(self, datatype, inner_id):
    return '%d^%d^%s' % (datatype,
                         self.migration_history.GetLatestVersion(datatype),
                         inner_id)

  def _ExtractIdInfo(self, id_string):
    if not id_string or id_string == ROOT_ID:
      return None
    datatype_string, separator, remainder = id_string.partition('^')
    migration_version_string, separator, inner_id = remainder.partition('^')
    return (int(datatype_string), int(migration_version_string), inner_id)
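
  # Example (illustrative): server IDs embed the datatype enum value and the
  # current migration version, so _ExtractIdInfo inverts _MakeCurrentId.
  # With BOOKMARK == 8 and migration version 1:
  #
  #   >>> model = SyncDataModel()
  #   >>> model._MakeCurrentId(BOOKMARK, '<client tag>xyz')
  #   '8^1^<client tag>xyz'
  #   >>> model._ExtractIdInfo('8^1^<client tag>xyz')
  #   (8, 1, '<client tag>xyz')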

  def _WritePosition(self, entry, parent_id):
    """Ensure the entry has an absolute, numeric position and parent_id.

    Historically, clients would specify positions using the predecessor-based
    references in the insert_after_item_id field; starting July 2011, this
    was changed and Chrome now sends up the absolute position.  The server
    must store a position_in_parent value and must not maintain
    insert_after_item_id.
    Starting in Jan 2013, the client will also send up a unique_position field
    which should be saved and returned on subsequent GetUpdates.

    Args:
      entry: The entry for which to write a position.  Its ID fields are
        assumed to be server IDs.  This entry will have its parent_id_string,
        position_in_parent and unique_position fields updated; its
        insert_after_item_id field will be cleared.
      parent_id: The ID of the entry intended as the new parent.
    """

    entry.parent_id_string = parent_id
    if not entry.HasField('position_in_parent'):
      entry.position_in_parent = 1337  # A debuggable, distinctive default.
    entry.ClearField('insert_after_item_id')

  def _ItemExists(self, id_string):
    """Determine whether an item exists in the changelog."""
    return id_string in self._entries

  def _CreatePermanentItem(self, spec):
    """Create one permanent item from its spec, if it doesn't exist.

    The resulting item is added to the changelog.

    Args:
      spec: A PermanentItem object holding the properties of the item to create.
    """
    id_string = self._ServerTagToId(spec.tag)
    if self._ItemExists(id_string):
      return
    print 'Creating permanent item: %s' % spec.name
    entry = sync_pb2.SyncEntity()
    entry.id_string = id_string
    entry.non_unique_name = spec.name
    entry.name = spec.name
    entry.server_defined_unique_tag = spec.tag
    entry.folder = True
    entry.deleted = False
    entry.specifics.CopyFrom(GetDefaultEntitySpecifics(spec.sync_type))
    self._WritePosition(entry, self._ServerTagToId(spec.parent_tag))
    self._SaveEntry(entry)

  def _CreateDefaultPermanentItems(self, requested_types):
    """Ensure creation of all default permanent items for a given set of types.

    Args:
      requested_types: A list of sync data types from ALL_TYPES.
        Default permanent items are created only for these types.
    """
    for spec in self._PERMANENT_ITEM_SPECS:
      if spec.sync_type in requested_types and spec.create_by_default:
        self._CreatePermanentItem(spec)

  def ResetStoreBirthday(self):
    """Resets the store birthday to a random value."""
    # TODO(nick): uuid.uuid1() is better, but python 2.5 only.
    self.store_birthday = '%0.30f' % random.random()

  def StoreBirthday(self):
    """Gets the store birthday."""
    return self.store_birthday

  def GetChanges(self, sieve):
    """Get entries which have changed, oldest first.

    At most _BATCH_SIZE entries are returned, in strict version order.

    Args:
      sieve: An update sieve to use to filter out updates the client
        has already seen.
    Returns:
      A tuple of (version, entries, changes_remaining).  Version is a new
      timestamp value, which should be used as the starting point for the
      next query.  Entries is the batch of entries meeting the current
      timestamp query.  Changes_remaining indicates the number of changes
      left on the server after this batch.
    """
    if not sieve.HasAnyTimestamp():
      return (0, [], 0)
    min_timestamp = sieve.GetMinTimestamp()
    first_time_types = sieve.GetFirstTimeTypes()
    self._CreateDefaultPermanentItems(first_time_types)
    # The mobile bookmarks ('Synced Bookmarks') folder is not created by
    # default; create it only when the client requests it.
    if (sieve.GetCreateMobileBookmarks() and
        first_time_types.count(BOOKMARK) > 0):
      self.TriggerCreateSyncedBookmarks()

    self.TriggerAcknowledgeManagedUsers()

    change_log = sorted(self._entries.values(),
                        key=operator.attrgetter('version'))
    new_changes = [x for x in change_log if x.version > min_timestamp]
    # Pick _BATCH_SIZE new changes, and then filter them.  This matches
    # the RPC behavior of the production sync server.
    batch = new_changes[:self._BATCH_SIZE]
    if not batch:
      # Client is up to date.
      return (min_timestamp, [], 0)

    # Restrict batch to requested types.  Tombstones are untyped
    # and will always get included.
    filtered = [copy.deepcopy(item) for item in batch
                if item.deleted or sieve.ClientWantsItem(item)]

    # The new client timestamp is the timestamp of the last item in the
    # batch, even if that item was filtered out.
    return (batch[-1].version, filtered, len(new_changes) - len(batch))

  def GetKeystoreKeys(self):
    """Returns the encryption keys for this account."""
    print "Returning encryption keys: %s" % self._keys
    return self._keys

  def _CopyOverImmutableFields(self, entry):
    """Preserve immutable fields by copying pre-commit state.

    Args:
      entry: A sync entity from the client.
    """
    if entry.id_string in self._entries:
      if self._entries[entry.id_string].HasField(
          'server_defined_unique_tag'):
        entry.server_defined_unique_tag = (
            self._entries[entry.id_string].server_defined_unique_tag)

  def _CheckVersionForCommit(self, entry):
    """Perform an optimistic concurrency check on the version number.

    Clients are only allowed to commit if they report having seen the most
    recent version of an object.

    Args:
      entry: A sync entity from the client.  It is assumed that ID fields
        have been converted to server IDs.
    Returns:
      A boolean value indicating whether the client's version matches the
      newest server version for the given entry.
    """
    if entry.id_string in self._entries:
      # Allow edits/deletes if the version matches, and any undeletion.
      return (self._entries[entry.id_string].version == entry.version or
              self._entries[entry.id_string].deleted)
    else:
      # Allow unknown ID only if the client thinks it's new too.
      return entry.version == 0

  def _CheckParentIdForCommit(self, entry):
    """Check that the parent ID referenced in a SyncEntity actually exists.

    Args:
      entry: A sync entity from the client.  It is assumed that ID fields
        have been converted to server IDs.
    Returns:
      A boolean value indicating whether the entity's parent ID is an object
      that actually exists (and is not deleted) in the current account state.
    """
    if entry.parent_id_string == ROOT_ID:
      # This is generally allowed.
      return True
    if entry.parent_id_string not in self._entries:
      print 'Warning: Client sent unknown ID.  Should never happen.'
      return False
    if entry.parent_id_string == entry.id_string:
      print 'Warning: Client sent circular reference.  Should never happen.'
      return False
    if self._entries[entry.parent_id_string].deleted:
      # This can happen in a race condition between two clients.
      return False
    if not self._entries[entry.parent_id_string].folder:
      print 'Warning: Client sent non-folder parent.  Should never happen.'
      return False
    return True

  def _RewriteIdsAsServerIds(self, entry, cache_guid, commit_session):
    """Convert ID fields in a client sync entry to server IDs.

    A commit batch sent by a client may contain new items for which the
    server has not generated IDs yet.  And within a commit batch, later
    items are allowed to refer to earlier items.  This method will
    generate server IDs for new items, as well as rewrite references
    to items whose server IDs were generated earlier in the batch.

    Args:
      entry: The client sync entry to modify.
      cache_guid: The globally unique ID of the client that sent this
        commit request.
      commit_session: A dictionary mapping the original IDs to the new server
        IDs, for any items committed earlier in the batch.
    """
    if entry.version == 0:
      data_type = GetEntryType(entry)
      if entry.HasField('client_defined_unique_tag'):
        # When present, this should determine the item's ID.
        new_id = self._ClientTagToId(data_type, entry.client_defined_unique_tag)
      else:
        new_id = self._ClientIdToId(data_type, cache_guid, entry.id_string)
        entry.originator_cache_guid = cache_guid
        entry.originator_client_item_id = entry.id_string
      commit_session[entry.id_string] = new_id  # Remember the remapping.
      entry.id_string = new_id
    if entry.parent_id_string in commit_session:
      entry.parent_id_string = commit_session[entry.parent_id_string]
    if entry.insert_after_item_id in commit_session:
      entry.insert_after_item_id = commit_session[entry.insert_after_item_id]
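
  # Example (illustrative): for a new entry (version 0) with client-local ID
  # 'local_1', the rewrite generates a server ID and records the remapping in
  # commit_session for later entries in the same batch:
  #
  #   >>> session = {}
  #   >>> entry = sync_pb2.SyncEntity()
  #   >>> entry.id_string = 'local_1'
  #   >>> entry.version = 0
  #   >>> entry.specifics.bookmark.SetInParent()
  #   >>> SyncDataModel()._RewriteIdsAsServerIds(entry, 'cache_guid', session)
  #   >>> session['local_1'] == entry.id_string
  #   True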

  def ValidateCommitEntries(self, entries):
    """Raise an exception if a commit batch contains any global errors.

    Arguments:
      entries: an iterable containing commit-form SyncEntity protocol buffers.

    Raises:
      MigrationDoneError: if any of the entries reference a recently-migrated
        datatype.
    """
    server_ids_in_commit = set()
    local_ids_in_commit = set()
    for entry in entries:
      if entry.version:
        server_ids_in_commit.add(entry.id_string)
      else:
        local_ids_in_commit.add(entry.id_string)
      if entry.HasField('parent_id_string'):
        if entry.parent_id_string not in local_ids_in_commit:
          server_ids_in_commit.add(entry.parent_id_string)

    versions_present = {}
    for server_id in server_ids_in_commit:
      parsed = self._ExtractIdInfo(server_id)
      if parsed:
        datatype, version, _ = parsed
        versions_present.setdefault(datatype, []).append(version)

    self.migration_history.CheckAllCurrent(
        dict((k, min(v)) for k, v in versions_present.iteritems()))

  def CommitEntry(self, entry, cache_guid, commit_session):
    """Attempt to commit one entry to the user's account.

    Args:
      entry: A SyncEntity protobuf representing desired object changes.
      cache_guid: A string value uniquely identifying the client; this
        is used for ID generation and will determine the originator_cache_guid
        if the entry is new.
      commit_session: A dictionary mapping client IDs to server IDs for any
        objects committed earlier this session.  If the entry gets a new ID
        during commit, the change will be recorded here.
    Returns:
      A SyncEntity reflecting the post-commit value of the entry, or None
      if the entry was not committed due to an error.
    """
    entry = copy.deepcopy(entry)

    # Generate server IDs for this entry, and write generated server IDs
    # from earlier entries into the message's fields, as appropriate.  The
    # ID generation state is stored in 'commit_session'.
    self._RewriteIdsAsServerIds(entry, cache_guid, commit_session)

    # Perform the optimistic concurrency check on the entry's version number.
    # Clients are not allowed to commit unless they indicate that they've seen
    # the most recent version of an object.
    if not self._CheckVersionForCommit(entry):
      return None

    # Check the validity of the parent ID; it must exist at this point.
    # TODO(nick): Implement cycle detection and resolution.
    if not self._CheckParentIdForCommit(entry):
      return None

    self._CopyOverImmutableFields(entry)

    # At this point, the commit is definitely going to happen.

    # Deletion works by storing a limited record for an entry, called a
    # tombstone.  A sync server must track deleted IDs forever, since it does
    # not keep track of client knowledge (there's no deletion ACK event).
    if entry.deleted:
      def MakeTombstone(id_string, datatype):
        """Make a tombstone entry that will replace the entry being deleted.

        Args:
          id_string: Index of the SyncEntity to be deleted.
          datatype: The sync type (python enum) of the entity.
        Returns:
          A new SyncEntity reflecting the fact that the entry is deleted.
        """
        # Only the ID, version and deletion state are preserved on a tombstone.
        tombstone = sync_pb2.SyncEntity()
        tombstone.id_string = id_string
        tombstone.deleted = True
        tombstone.name = ''
        tombstone.specifics.CopyFrom(GetDefaultEntitySpecifics(datatype))
        return tombstone

      def IsChild(child_id):
        """Check if a SyncEntity is a child of entry, or any of its children.

        Args:
          child_id: Index of the SyncEntity that is a possible child of entry.
        Returns:
          True if it is a child; false otherwise.
        """
        if child_id not in self._entries:
          return False
        if self._entries[child_id].parent_id_string == entry.id_string:
          return True
        return IsChild(self._entries[child_id].parent_id_string)

      # Identify any children entry might have.
      child_ids = [child.id_string for child in self._entries.itervalues()
                   if IsChild(child.id_string)]

      # Mark all children that were identified as deleted.
      for child_id in child_ids:
        datatype = GetEntryType(self._entries[child_id])
        self._SaveEntry(MakeTombstone(child_id, datatype))

      # Delete entry itself.
      datatype = GetEntryType(self._entries[entry.id_string])
      entry = MakeTombstone(entry.id_string, datatype)
    else:
      # Comments in sync.proto detail how the representation of positional
      # ordering works.
      #
      # We've almost fully deprecated the 'insert_after_item_id' field.
      # The 'position_in_parent' field is also deprecated, but as of Jan 2013
      # is still in common use.  The 'unique_position' field is the latest
      # and greatest in positioning technology.
      #
      # This server supports 'position_in_parent' and 'unique_position'.
      self._WritePosition(entry, entry.parent_id_string)

    # Preserve the originator info, which the client is not required to send
    # when updating.
    base_entry = self._entries.get(entry.id_string)
    if base_entry and not entry.HasField('originator_cache_guid'):
      entry.originator_cache_guid = base_entry.originator_cache_guid
      entry.originator_client_item_id = base_entry.originator_client_item_id

    # Store the current time since the Unix epoch in milliseconds.
    entry.mtime = (int((time.mktime(time.gmtime()) -
        (time.mktime(FIRST_DAY_UNIX_TIME_EPOCH) - ONE_DAY_SECONDS))*1000))

    # Commit the change.  This also updates the version number.
    self._SaveEntry(entry)
    return entry

  def _RewriteVersionInId(self, id_string):
    """Rewrites an ID so that its migration version becomes current."""
    parsed_id = self._ExtractIdInfo(id_string)
    if not parsed_id:
      return id_string
    datatype, old_migration_version, inner_id = parsed_id
    return self._MakeCurrentId(datatype, inner_id)

  def TriggerMigration(self, datatypes):
    """Cause a migration to occur for a set of datatypes on this account.

    Clients will see the MIGRATION_DONE error for these datatypes until they
    resync them.
    """
    self.migration_history.Bump(datatypes)
    all_entries = self._entries.values()
    self._entries.clear()
    for entry in all_entries:
      new_id = self._RewriteVersionInId(entry.id_string)
      entry.id_string = new_id
      if entry.HasField('parent_id_string'):
        entry.parent_id_string = self._RewriteVersionInId(
            entry.parent_id_string)
      self._entries[entry.id_string] = entry

  def TriggerSyncTabFavicons(self):
    """Set the 'sync_tab_favicons' field of this account's nigori node.

    If the field is not currently set, will write a new nigori node entry
    with the field set. Else does nothing.
    """

    nigori_tag = "google_chrome_nigori"
    nigori_original = self._entries.get(self._ServerTagToId(nigori_tag))
    if nigori_original.specifics.nigori.sync_tab_favicons:
      return
    nigori_new = copy.deepcopy(nigori_original)
    nigori_new.specifics.nigori.sync_tab_favicons = True
    self._SaveEntry(nigori_new)

  def TriggerCreateSyncedBookmarks(self):
    """Create the Synced Bookmarks folder under the Bookmarks permanent item.

    Clients will then receive the Synced Bookmarks folder on future
    GetUpdates, and new bookmarks can be added within the Synced Bookmarks
    folder.
    """

    synced_bookmarks_spec, = [spec for spec in self._PERMANENT_ITEM_SPECS
                              if spec.name == "Synced Bookmarks"]
    self._CreatePermanentItem(synced_bookmarks_spec)

  def TriggerEnableKeystoreEncryption(self):
    """Create the keystore_encryption experiment entity and enable it.

    A new entity within the EXPERIMENTS datatype is created with the unique
    client tag "keystore_encryption" if it doesn't already exist. The
    keystore_encryption message is then filled with |enabled| set to true.
    """

    experiment_id = self._ServerTagToId("google_chrome_experiments")
    keystore_encryption_id = self._ClientTagToId(
        EXPERIMENTS,
        KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
    keystore_entry = self._entries.get(keystore_encryption_id)
    if keystore_entry is None:
      keystore_entry = sync_pb2.SyncEntity()
      keystore_entry.id_string = keystore_encryption_id
      keystore_entry.name = "Keystore Encryption"
      keystore_entry.client_defined_unique_tag = (
          KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
      keystore_entry.folder = False
      keystore_entry.deleted = False
      keystore_entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
      self._WritePosition(keystore_entry, experiment_id)

    keystore_entry.specifics.experiments.keystore_encryption.enabled = True

    self._SaveEntry(keystore_entry)

  def TriggerRotateKeystoreKeys(self):
    """Rotate the current set of keystore encryption keys.

    |self._keys| will have a new random encryption key appended to it. We touch
    the nigori node so that each client will receive the new encryption keys
    only once.
    """

    # Add a new encryption key.
    self._keys.append(MakeNewKeystoreKey())

    # Increment the nigori node's timestamp, so clients will get the new keys
    # on their next GetUpdates (any time the nigori node is sent back, we also
    # send back the keystore keys).
    nigori_tag = "google_chrome_nigori"
    self._SaveEntry(self._entries.get(self._ServerTagToId(nigori_tag)))

  def TriggerAcknowledgeManagedUsers(self):
    """Set the "acknowledged" flag for any managed user entities that don't
       have it set already.
    """

    if not self.acknowledge_managed_users:
      return

    managed_users = [copy.deepcopy(entry) for entry in self._entries.values()
                     if entry.specifics.HasField('managed_user')
                     and not entry.specifics.managed_user.acknowledged]
    for user in managed_users:
      user.specifics.managed_user.acknowledged = True
      self._SaveEntry(user)

  def TriggerEnablePreCommitGetUpdateAvoidance(self):
    """Sets the experiment to enable pre-commit GetUpdate avoidance."""
    experiment_id = self._ServerTagToId("google_chrome_experiments")
    pre_commit_gu_avoidance_id = self._ClientTagToId(
        EXPERIMENTS,
        PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG)
    entry = self._entries.get(pre_commit_gu_avoidance_id)
    if entry is None:
      entry = sync_pb2.SyncEntity()
      entry.id_string = pre_commit_gu_avoidance_id
      entry.name = "Pre-commit GU avoidance"
      entry.client_defined_unique_tag = PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG
      entry.folder = False
      entry.deleted = False
      entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
      self._WritePosition(entry, experiment_id)
    entry.specifics.experiments.pre_commit_update_avoidance.enabled = True
    self._SaveEntry(entry)

  def SetInducedError(self, error, error_frequency,
                      sync_count_before_errors):
    self.induced_error = error
    self.induced_error_frequency = error_frequency
    self.sync_count_before_errors = sync_count_before_errors

  def GetInducedError(self):
    return self.induced_error

  def AddSyncedNotification(self, serialized_notification):
    """Adds a synced notification to the server data.

    The notification will be delivered to the client on the next GetUpdates
    call.

    Args:
      serialized_notification: A serialized CoalescedSyncedNotification.

    Returns:
      The string representation of the added SyncEntity.

    Raises:
      ClientNotConnectedError: if the client has not yet connected to this
        server.
    """
    # A unique string used wherever a unique ID for this notification is
    # required.
    unique_notification_id = str(uuid.uuid4())

    specifics = self._CreateSyncedNotificationEntitySpecifics(
        unique_notification_id, serialized_notification)

    # Create the root SyncEntity representing a single notification.
    entity = sync_pb2.SyncEntity()
    entity.specifics.CopyFrom(specifics)
    entity.parent_id_string = self._ServerTagToId(
        'google_chrome_synced_notifications')
    entity.name = 'Synced notification added for testing'
    entity.server_defined_unique_tag = unique_notification_id

    # Set the version to one more than the greatest version number already
    # seen.
    entries = sorted(self._entries.values(), key=operator.attrgetter('version'))
    if not entries:
      raise ClientNotConnectedError
    entity.version = entries[-1].version + 1

    entity.client_defined_unique_tag = self._CreateSyncedNotificationClientTag(
        specifics.synced_notification.coalesced_notification.key)
    entity.id_string = self._ClientTagToId(GetEntryType(entity),
                                           entity.client_defined_unique_tag)

    self._entries[entity.id_string] = copy.deepcopy(entity)

    return google.protobuf.text_format.MessageToString(entity)

  def _CreateSyncedNotificationEntitySpecifics(self, unique_id,
                                               serialized_notification):
    """Create the EntitySpecifics proto for a synced notification."""
    coalesced = synced_notification_data_pb2.CoalescedSyncedNotification()
    google.protobuf.text_format.Merge(serialized_notification, coalesced)

    # Override the provided key so that we have a unique one.
    coalesced.key = unique_id

    specifics = sync_pb2.EntitySpecifics()
    notification_specifics = \
        synced_notification_specifics_pb2.SyncedNotificationSpecifics()
    notification_specifics.coalesced_notification.CopyFrom(coalesced)
    specifics.synced_notification.CopyFrom(notification_specifics)

    return specifics

  def _CreateSyncedNotificationClientTag(self, key):
    """Create the client_defined_unique_tag value for a SyncedNotification.

    Args:
      key: The notification key used to create the client tag.

    Returns:
      The string value to be used as the client_defined_unique_tag.
    """
    serialized_type = sync_pb2.EntitySpecifics()
    specifics = synced_notification_specifics_pb2.SyncedNotificationSpecifics()
    serialized_type.synced_notification.CopyFrom(specifics)
    hash_input = serialized_type.SerializeToString() + key
    return base64.b64encode(hashlib.sha1(hash_input).digest())
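
  # Example (illustrative): the resulting tag is the base64 encoding of a
  # 20-byte SHA-1 digest, so decoding it always yields 20 bytes:
  #
  #   >>> tag = SyncDataModel()._CreateSyncedNotificationClientTag('some-key')
  #   >>> len(base64.b64decode(tag))
  #   20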


class TestServer(object):
  """An object to handle requests for one (and only one) Chrome Sync account.

  TestServer consumes the sync command messages that are the outermost
  layers of the protocol, performs the corresponding actions on its
  SyncDataModel, and constructs an appropriate response message.
  """

  def __init__(self):
    # The implementation supports exactly one account; its state is here.
    self.account = SyncDataModel()
    self.account_lock = threading.Lock()
    # Clients that have talked to us: a map from the full client ID
    # to its nickname.
    self.clients = {}
    self.client_name_generator = ('+' * times + chr(c)
        for times in xrange(0, sys.maxint) for c in xrange(ord('A'), ord('Z')))
    self.transient_error = False
    self.sync_count = 0
    # Gaia OAuth2 Token fields and their default values.
    self.response_code = 200
    self.request_token = 'rt1'
    self.access_token = 'at1'
    self.expires_in = 3600
    self.token_type = 'Bearer'
    # The ClientCommand to send back on each ServerToClientResponse. If set to
    # None, no ClientCommand should be sent.
    self._client_command = None

  def GetShortClientName(self, query):
    parsed = cgi.parse_qs(query[query.find('?')+1:])
    client_id = parsed.get('client_id')
    if not client_id:
      return '?'
    client_id = client_id[0]
    if client_id not in self.clients:
      self.clients[client_id] = self.client_name_generator.next()
    return self.clients[client_id]

  def CheckStoreBirthday(self, request):
    """Raises StoreBirthdayError if the request's birthday is a mismatch."""
    if not request.HasField('store_birthday'):
      return
    if self.account.StoreBirthday() != request.store_birthday:
      raise StoreBirthdayError

  def CheckTransientError(self):
    """Raises TransientError if the transient_error flag is set."""
    if self.transient_error:
      raise TransientError

  def CheckSendError(self):
    """Raises SyncInducedError if needed."""
    if (self.account.induced_error.error_type !=
        sync_enums_pb2.SyncEnums.UNKNOWN):
      # Always means return the given error for all requests.
      if self.account.induced_error_frequency == ERROR_FREQUENCY_ALWAYS:
        raise SyncInducedError
      # This means the FIRST 2 of every 3 requests return an error. Don't
      # switch the order of failures: there are test cases that rely on the
      # first 2 being the failures rather than the last 2.
      elif (self.account.induced_error_frequency ==
            ERROR_FREQUENCY_TWO_THIRDS):
        if (((self.sync_count -
              self.account.sync_count_before_errors) % 3) != 0):
          raise SyncInducedError
      else:
        raise InducedErrorFrequencyNotDefined
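
  # A worked example of the two-thirds arithmetic above, assuming
  # sync_count_before_errors == 0: requests with sync_count 1 and 2 raise
  # (1 % 3 and 2 % 3 are nonzero), request 3 passes (3 % 3 == 0), and the
  # pattern then repeats.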

  def HandleMigrate(self, path):
    query = urlparse.urlparse(path)[4]
    code = 200
    self.account_lock.acquire()
    try:
      datatypes = [DataTypeStringToSyncTypeLoose(x)
                   for x in urlparse.parse_qs(query).get('type', [])]
      if datatypes:
        self.account.TriggerMigration(datatypes)
        response = 'Migrated datatypes %s' % (
            ' and '.join(SyncTypeToString(x).upper() for x in datatypes))
      else:
        response = 'Please specify one or more <i>type=name</i> parameters'
        code = 400
    except DataTypeIdNotRecognized:
      response = 'Could not interpret datatype name'
      code = 400
    finally:
      self.account_lock.release()
    return (code, '<html><title>Migration: %d</title><H1>%d %s</H1></html>' %
                (code, code, response))
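  # Illustrative trigger, assuming the embedding test server routes a
  # migration URL to this handler:
  #   GET <base>/migrate?type=BOOKMARK&type=PREFERENCE
  # would respond with '200 Migrated datatypes BOOKMARK and PREFERENCE'.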

  def HandleSetInducedError(self, path):
    query = urlparse.urlparse(path)[4]
    self.account_lock.acquire()
    code = 200
    response = 'Success'
    error = sync_pb2.ClientToServerResponse.Error()
    try:
      params = urlparse.parse_qs(query)
      error.error_type = int(params['error'][0])
      error.action = int(params['action'][0])
      # 'url', 'error_description' and 'frequency' are optional parameters.
      error.url = params.get('url', [''])[0]
      error.error_description = params.get('error_description', [''])[0]
      try:
        error_frequency = int(params['frequency'][0])
      except KeyError:
        error_frequency = ERROR_FREQUENCY_ALWAYS
      self.account.SetInducedError(error, error_frequency, self.sync_count)
      response = ('Error = %d, action = %d, url = %s, description = %s' %
                  (error.error_type, error.action,
                   error.url,
                   error.error_description))
    except (KeyError, ValueError):
      # 'error' and 'action' are required; int() may also reject bad values.
      response = 'Could not parse url'
      code = 400
    finally:
      self.account_lock.release()
    return (code, '<html><title>SetError: %d</title><H1>%d %s</H1></html>' %
                (code, code, response))
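  # Illustrative trigger, assuming the embedding test server routes an
  # error-injection URL to this handler:
  #   GET <base>/setinducederror?error=<int>&action=<int>&frequency=2
  # error and action are integer enum values from the sync protocol buffers,
  # and frequency indexes SYNC_ERROR_FREQUENCY (2 selects
  # ERROR_FREQUENCY_TWO_THIRDS; omitting it defaults to ALWAYS).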

  def HandleCreateBirthdayError(self):
    self.account.ResetStoreBirthday()
    return (
        200,
        '<html><title>Birthday error</title><H1>Birthday error</H1></html>')

  def HandleSetTransientError(self):
    self.transient_error = True
    return (
        200,
        '<html><title>Transient error</title><H1>Transient error</H1></html>')

  def HandleSetSyncTabFavicons(self):
    """Set 'sync_tab_favicons' field of the nigori node for this account."""
    self.account.TriggerSyncTabFavicons()
    return (
        200,
        '<html><title>Tab Favicons</title><H1>Tab Favicons</H1></html>')

  def HandleCreateSyncedBookmarks(self):
    """Create the Synced Bookmarks folder under Bookmarks."""
    self.account.TriggerCreateSyncedBookmarks()
    return (
        200,
        '<html><title>Synced Bookmarks</title><H1>Synced Bookmarks</H1></html>')

  def HandleEnableKeystoreEncryption(self):
    """Enables the keystore encryption experiment."""
    self.account.TriggerEnableKeystoreEncryption()
    return (
        200,
        '<html><title>Enable Keystore Encryption</title>'
            '<H1>Enable Keystore Encryption</H1></html>')

  def HandleRotateKeystoreKeys(self):
    """Rotate the keystore encryption keys."""
    self.account.TriggerRotateKeystoreKeys()
    return (
        200,
        '<html><title>Rotate Keystore Keys</title>'
            '<H1>Rotate Keystore Keys</H1></html>')

  def HandleEnableManagedUserAcknowledgement(self):
    """Enable acknowledging newly created managed users."""
    self.account.acknowledge_managed_users = True
    return (
        200,
        '<html><title>Enable Managed User Acknowledgement</title>'
            '<H1>Enable Managed User Acknowledgement</H1></html>')

  def HandleEnablePreCommitGetUpdateAvoidance(self):
    """Enables the pre-commit GU avoidance experiment."""
    self.account.TriggerEnablePreCommitGetUpdateAvoidance()
    return (
        200,
        '<html><title>Enable pre-commit GU avoidance</title>'
            '<H1>Enable pre-commit GU avoidance</H1></html>')

  def HandleCommand(self, query, raw_request):
    """Decode and handle a sync command from a raw input of bytes.

    This is the main entry point for this class.  It is safe to call this
    method from multiple threads.

    Args:
      query: The query string of the request URL, used only to derive a
        short client nickname for logging.
      raw_request: An iterable byte sequence to be interpreted as a sync
        protocol command.
    Returns:
      A tuple (response_code, raw_response); the first value is an HTTP
      result code, while the second value is a string of bytes which is the
      serialized reply to the command.
    """
    self.account_lock.acquire()
    self.sync_count += 1
    def print_context(direction):
      print '[Client %s %s %s.py]' % (self.GetShortClientName(query), direction,
                                      __name__),

    try:
      request = sync_pb2.ClientToServerMessage()
      request.MergeFromString(raw_request)
      contents = request.message_contents

      response = sync_pb2.ClientToServerResponse()
      response.error_code = sync_enums_pb2.SyncEnums.SUCCESS

      if self._client_command:
        response.client_command.CopyFrom(self._client_command)

      self.CheckStoreBirthday(request)
      response.store_birthday = self.account.store_birthday
      self.CheckTransientError()
      self.CheckSendError()

      print_context('->')

      if contents == sync_pb2.ClientToServerMessage.AUTHENTICATE:
        print 'Authenticate'
        # We accept any authentication token, and support only one account.
        # TODO(nick): Mock out the GAIA authentication as well; hook up here.
        response.authenticate.user.email = 'syncjuser@chromium'
        response.authenticate.user.display_name = 'Sync J User'
      elif contents == sync_pb2.ClientToServerMessage.COMMIT:
        print 'Commit %d item(s)' % len(request.commit.entries)
        self.HandleCommit(request.commit, response.commit)
      elif contents == sync_pb2.ClientToServerMessage.GET_UPDATES:
        print 'GetUpdates',
        self.HandleGetUpdates(request.get_updates, response.get_updates)
        print_context('<-')
        print '%d update(s)' % len(response.get_updates.entries)
      else:
        print 'Unrecognizable sync request!'
        return (400, None)  # Bad request.
      return (200, response.SerializeToString())
    except MigrationDoneError, error:
      print_context('<-')
      print 'MIGRATION_DONE: <%s>' % (ShortDatatypeListSummary(error.datatypes))
      response = sync_pb2.ClientToServerResponse()
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.MIGRATION_DONE
      response.migrated_data_type_id[:] = [
          SyncTypeToProtocolDataTypeId(x) for x in error.datatypes]
      return (200, response.SerializeToString())
    except StoreBirthdayError:
      print_context('<-')
      print 'NOT_MY_BIRTHDAY'
      response = sync_pb2.ClientToServerResponse()
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.NOT_MY_BIRTHDAY
      return (200, response.SerializeToString())
    except TransientError:
      # This path is deprecated; it will be removed once the test cases that
      # rely on it are removed.
      print_context('<-')
      print 'TRANSIENT_ERROR'
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.TRANSIENT_ERROR
      return (200, response.SerializeToString())
    except SyncInducedError:
      print_context('<-')
      print 'INDUCED_ERROR'
      response.store_birthday = self.account.store_birthday
      error = self.account.GetInducedError()
      response.error.error_type = error.error_type
      response.error.url = error.url
      response.error.error_description = error.error_description
      response.error.action = error.action
      return (200, response.SerializeToString())
    finally:
      self.account_lock.release()
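  # Illustrative driver for this entry point (hypothetical; a real
  # ClientToServerMessage also needs its required fields, e.g. share,
  # populated before SerializeToString() will succeed):
  #   msg = sync_pb2.ClientToServerMessage()
  #   msg.message_contents = sync_pb2.ClientToServerMessage.GET_UPDATES
  #   code, raw = server.HandleCommand('?client_id=xyz',
  #                                    msg.SerializeToString())
  #   reply = sync_pb2.ClientToServerResponse()
  #   reply.MergeFromString(raw)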

  def HandleCommit(self, commit_message, commit_response):
    """Respond to a Commit request by updating the user's account state.

    Commit attempts stop after the first error, returning a CONFLICT result
    for any unattempted entries.

    Args:
      commit_message: A sync_pb2.CommitMessage protobuf holding the content
        of the client's request.
      commit_response: A sync_pb2.CommitResponse protobuf into which a reply
        to the client request will be written.
    """
    commit_response.SetInParent()
    batch_failure = False
    session = {}  # Tracks ID renaming during the commit operation.
    guid = commit_message.cache_guid

    self.account.ValidateCommitEntries(commit_message.entries)

    for entry in commit_message.entries:
      server_entry = None
      if not batch_failure:
        # Try to commit the change to the account.
        server_entry = self.account.CommitEntry(entry, guid, session)

      # An entryresponse is returned in both success and failure cases.
      reply = commit_response.entryresponse.add()
      if not server_entry:
        reply.response_type = sync_pb2.CommitResponse.CONFLICT
        reply.error_message = 'Conflict.'
        batch_failure = True  # One failure halts the batch.
      else:
        reply.response_type = sync_pb2.CommitResponse.SUCCESS
        # These are the properties that the server is allowed to override
        # during commit; the client wants to know their values at the end
        # of the operation.
        reply.id_string = server_entry.id_string
        if not server_entry.deleted:
          # Note: the production server doesn't actually send the
          # parent_id_string on commit responses, so we don't either.
          reply.position_in_parent = server_entry.position_in_parent
          reply.version = server_entry.version
          reply.name = server_entry.name
          reply.non_unique_name = server_entry.non_unique_name
        else:
          reply.version = entry.version + 1
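    # Halting behavior, for illustration: if the third of five entries fails
    # to commit, entries 1 and 2 receive SUCCESS while entries 3, 4, and 5
    # all receive CONFLICT, because batch_failure suppresses the later
    # commit attempts.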

  def HandleGetUpdates(self, update_request, update_response):
    """Respond to a GetUpdates request by querying the user's account.

    Args:
      update_request: A sync_pb2.GetUpdatesMessage protobuf holding the
        content of the client's request.
      update_response: A sync_pb2.GetUpdatesResponse protobuf into which a
        reply to the client request will be written.
    """
    update_response.SetInParent()
    update_sieve = UpdateSieve(update_request, self.account.migration_history)

    print CallerInfoToString(update_request.caller_info.source),
    print update_sieve.SummarizeRequest()

    update_sieve.CheckMigrationState()

    new_timestamp, entries, remaining = self.account.GetChanges(update_sieve)

    update_response.changes_remaining = remaining
    sending_nigori_node = False
    for entry in entries:
      if entry.name == 'Nigori':
        sending_nigori_node = True
      reply = update_response.entries.add()
      reply.CopyFrom(entry)
    update_sieve.SaveProgress(new_timestamp, update_response)

    if update_request.need_encryption_key or sending_nigori_node:
      update_response.encryption_keys.extend(self.account.GetKeystoreKeys())

  def HandleGetOauth2Token(self):
    return (int(self.response_code),
            '{\n'
            '  "refresh_token": "' + self.request_token + '",\n'
            '  "access_token": "' + self.access_token + '",\n'
            '  "expires_in": ' + str(self.expires_in) + ',\n'
            '  "token_type": "' + self.token_type + '"\n'
            '}')
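  # With the defaults set in __init__, this handler returns (200, body)
  # where body is:
  #   {
  #     "refresh_token": "rt1",
  #     "access_token": "at1",
  #     "expires_in": 3600,
  #     "token_type": "Bearer"
  #   }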

  def HandleSetOauth2Token(self, response_code, request_token, access_token,
                           expires_in, token_type):
    if response_code != 0:
      self.response_code = response_code
    if request_token != '':
      self.request_token = request_token
    if access_token != '':
      self.access_token = access_token
    if expires_in != 0:
      self.expires_in = expires_in
    if token_type != '':
      self.token_type = token_type

    return (200,
            '<html><title>Set OAuth2 Token</title>'
            '<H1>This server will now return the OAuth2 Token:</H1>'
            '<p>response_code: ' + str(self.response_code) + '</p>'
            '<p>request_token: ' + self.request_token + '</p>'
            '<p>access_token: ' + self.access_token + '</p>'
            '<p>expires_in: ' + str(self.expires_in) + '</p>'
            '<p>token_type: ' + self.token_type + '</p>'
            '</html>')
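  # For illustration: zero and empty-string arguments are "no change"
  # sentinels, so HandleSetOauth2Token(401, '', '', 0, '') changes only the
  # status code that HandleGetOauth2Token will subsequently return.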

  def CustomizeClientCommand(self, sessions_commit_delay_seconds):
    """Customizes the value of the ClientCommand of ServerToClientResponse.

    Currently, this only allows for changing the sessions_commit_delay_seconds
    field. This is useful for testing in conjunction with
    AddSyncedNotification so that synced notifications are seen immediately
    after triggering them with an HTTP call to the test server.

    Args:
      sessions_commit_delay_seconds: The desired sync delay time for sessions.
    """
    if not self._client_command:
      self._client_command = client_commands_pb2.ClientCommand()

    self._client_command.sessions_commit_delay_seconds = (
        sessions_commit_delay_seconds)
    return self._client_command
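  # For illustration: after CustomizeClientCommand(0), every response built
  # by HandleCommand carries a client_command whose
  # sessions_commit_delay_seconds is 0, so clients commit session changes
  # without delay.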