/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.android.basicsyncadapter;

import android.accounts.Account;
import android.annotation.TargetApi;
import android.content.AbstractThreadedSyncAdapter;
import android.content.ContentProviderClient;
import android.content.ContentProviderOperation;
import android.content.ContentResolver;
import android.content.Context;
import android.content.OperationApplicationException;
import android.content.SyncResult;
import android.database.Cursor;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.RemoteException;
import android.util.Log;

import com.example.android.basicsyncadapter.net.FeedParser;
import com.example.android.basicsyncadapter.provider.FeedContract;

import org.xmlpull.v1.XmlPullParserException;

import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

/**
 * Define a sync adapter for the app.
 *
 * <p>This class is instantiated in {@link SyncService}, which also binds SyncAdapter to the
 * system. SyncAdapter should only be initialized in SyncService, never anywhere else. (A sketch
 * of such a service appears in a comment at the end of this file.)
 *
 * <p>The system calls onPerformSync() via an RPC call through the IBinder object supplied by
 * SyncService.
 */
class SyncAdapter extends AbstractThreadedSyncAdapter {
    public static final String TAG = "SyncAdapter";

    /**
     * URL to fetch content from during a sync.
     *
     * <p>This points to the Android Developers Blog. (Side note: We highly recommend reading the
     * Android Developer Blog to stay up to date on the latest Android platform developments!)
     */
    private static final String FEED_URL = "http://android-developers.blogspot.com/atom.xml";

    /**
     * Network connection timeout, in milliseconds.
     */
    private static final int NET_CONNECT_TIMEOUT_MILLIS = 15000;  // 15 seconds

    /**
     * Network read timeout, in milliseconds.
     */
    private static final int NET_READ_TIMEOUT_MILLIS = 10000;  // 10 seconds

    /**
     * Content resolver, for performing database operations.
     */
    private final ContentResolver mContentResolver;

    /**
     * Projection used when querying the content provider. Returns all known fields.
     */
    private static final String[] PROJECTION = new String[] {
            FeedContract.Entry._ID,
            FeedContract.Entry.COLUMN_NAME_ENTRY_ID,
            FeedContract.Entry.COLUMN_NAME_TITLE,
            FeedContract.Entry.COLUMN_NAME_LINK,
            FeedContract.Entry.COLUMN_NAME_PUBLISHED};

    // Constants representing column positions from PROJECTION.
    public static final int COLUMN_ID = 0;
    public static final int COLUMN_ENTRY_ID = 1;
    public static final int COLUMN_TITLE = 2;
    public static final int COLUMN_LINK = 3;
    public static final int COLUMN_PUBLISHED = 4;
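    // Note: the column indices above must stay in step with the order of the columns in
    // PROJECTION; updateLocalFeedData() reads values out of the query Cursor by these positions.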
    /**
     * Constructor. Obtains handle to content resolver for later use.
     */
    public SyncAdapter(Context context, boolean autoInitialize) {
        super(context, autoInitialize);
        mContentResolver = context.getContentResolver();
    }

    /**
     * Constructor. Obtains handle to content resolver for later use.
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    public SyncAdapter(Context context, boolean autoInitialize, boolean allowParallelSyncs) {
        super(context, autoInitialize, allowParallelSyncs);
        mContentResolver = context.getContentResolver();
    }

    /**
     * Called by the Android system in response to a request to run the sync adapter. The work
     * required to read data from the network, parse it, and store it in the content provider is
     * done here. Extending AbstractThreadedSyncAdapter ensures that all methods within SyncAdapter
     * run on a background thread. For this reason, blocking I/O and other long-running tasks can
     * be run <em>in situ</em>, and you don't have to set up a separate thread for them.
     *
     * <p>This is where we actually perform any work required to perform a sync.
     * {@link android.content.AbstractThreadedSyncAdapter} guarantees that this will be called on
     * a non-UI thread, so it is safe to perform blocking I/O here.
     *
     * <p>The syncResult argument allows you to pass information back to the method that triggered
     * the sync.
     */
    @Override
    public void onPerformSync(Account account, Bundle extras, String authority,
                              ContentProviderClient provider, SyncResult syncResult) {
        Log.i(TAG, "Beginning network synchronization");
        try {
            final URL location = new URL(FEED_URL);
            InputStream stream = null;

            try {
                Log.i(TAG, "Streaming data from network: " + location);
                stream = downloadUrl(location);
                updateLocalFeedData(stream, syncResult);
                // Makes sure that the InputStream is closed after the app is
                // finished using it.
            } finally {
                if (stream != null) {
                    stream.close();
                }
            }
        } catch (MalformedURLException e) {
            Log.e(TAG, "Feed URL is malformed", e);
            syncResult.stats.numParseExceptions++;
            return;
        } catch (IOException e) {
            Log.e(TAG, "Error reading from network: " + e.toString());
            syncResult.stats.numIoExceptions++;
            return;
        } catch (XmlPullParserException e) {
            Log.e(TAG, "Error parsing feed: " + e.toString());
            syncResult.stats.numParseExceptions++;
            return;
        } catch (ParseException e) {
            Log.e(TAG, "Error parsing feed: " + e.toString());
            syncResult.stats.numParseExceptions++;
            return;
        } catch (RemoteException e) {
            Log.e(TAG, "Error updating database: " + e.toString());
            syncResult.databaseError = true;
            return;
        } catch (OperationApplicationException e) {
            Log.e(TAG, "Error updating database: " + e.toString());
            syncResult.databaseError = true;
            return;
        }
        Log.i(TAG, "Network synchronization complete");
    }
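    /**
     * Illustrative helper, not part of the original sample: shows one way a caller might request
     * an immediate, manual run of this sync adapter for a given account. The system responds by
     * invoking {@link #onPerformSync} on a background thread. How and where syncs are requested
     * is app-specific; treat this as a sketch rather than the sample's own API.
     */
    public static void requestManualSync(Account account) {
        Bundle extras = new Bundle();
        // Run the sync now, even if automatic syncing is disabled for this account/authority.
        extras.putBoolean(ContentResolver.SYNC_EXTRAS_MANUAL, true);
        // Ask the sync scheduler to run this request as soon as possible.
        extras.putBoolean(ContentResolver.SYNC_EXTRAS_EXPEDITED, true);
        ContentResolver.requestSync(account, FeedContract.CONTENT_AUTHORITY, extras);
    }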
    /**
     * Read XML from an input stream, storing it into the content provider.
     *
     * <p>This is where incoming data is persisted, committing the results of a sync. In order to
     * minimize (expensive) disk operations, we compare incoming data with what's already in our
     * database, and compute a merge. Only changes (insert/update/delete) will result in a database
     * write.
     *
     * <p>As an additional optimization, we use a batch operation to perform all database writes at
     * once.
     *
     * <p>Merge strategy:<br/>
     * 1. Get cursor to all items in feed<br/>
     * 2. For each item, check if it's in the incoming data.<br/>
     *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
     *            database UPDATE.<br/>
     *    b. NO: Schedule DELETE from database.<br/>
     * (At this point, the incoming list only contains items that are not yet in the database.)<br/>
     * 3. For any items remaining in incoming list, ADD to database.
     */
    public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult)
            throws IOException, XmlPullParserException, RemoteException,
            OperationApplicationException, ParseException {
        final FeedParser feedParser = new FeedParser();
        final ContentResolver contentResolver = getContext().getContentResolver();

        Log.i(TAG, "Parsing stream as Atom feed");
        final List<FeedParser.Entry> entries = feedParser.parse(stream);
        Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");

        ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

        // Build hash table of incoming entries
        HashMap<String, FeedParser.Entry> entryMap = new HashMap<String, FeedParser.Entry>();
        for (FeedParser.Entry e : entries) {
            entryMap.put(e.id, e);
        }

        // Get list of all items
        Log.i(TAG, "Fetching local entries for merge");
        Uri uri = FeedContract.Entry.CONTENT_URI; // Get all entries
        Cursor c = contentResolver.query(uri, PROJECTION, null, null, null);
        assert c != null;
        Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");

        // Find stale data
        int id;
        String entryId;
        String title;
        String link;
        long published;
        while (c.moveToNext()) {
            syncResult.stats.numEntries++;
            id = c.getInt(COLUMN_ID);
            entryId = c.getString(COLUMN_ENTRY_ID);
            title = c.getString(COLUMN_TITLE);
            link = c.getString(COLUMN_LINK);
            published = c.getLong(COLUMN_PUBLISHED);
            FeedParser.Entry match = entryMap.get(entryId);
            if (match != null) {
                // Entry exists. Remove from entry map to prevent insert later.
                entryMap.remove(entryId);
                // Check to see if the entry needs to be updated
                Uri existingUri = FeedContract.Entry.CONTENT_URI.buildUpon()
                        .appendPath(Integer.toString(id)).build();
                if ((match.title != null && !match.title.equals(title)) ||
                        (match.link != null && !match.link.equals(link)) ||
                        (match.published != published)) {
                    // Update existing record
                    Log.i(TAG, "Scheduling update: " + existingUri);
                    batch.add(ContentProviderOperation.newUpdate(existingUri)
                            .withValue(FeedContract.Entry.COLUMN_NAME_TITLE, match.title)
                            .withValue(FeedContract.Entry.COLUMN_NAME_LINK, match.link)
                            .withValue(FeedContract.Entry.COLUMN_NAME_PUBLISHED, match.published)
                            .build());
                    syncResult.stats.numUpdates++;
                } else {
                    Log.i(TAG, "No action: " + existingUri);
                }
            } else {
                // Entry doesn't exist. Remove it from the database.
                Uri deleteUri = FeedContract.Entry.CONTENT_URI.buildUpon()
                        .appendPath(Integer.toString(id)).build();
                Log.i(TAG, "Scheduling delete: " + deleteUri);
                batch.add(ContentProviderOperation.newDelete(deleteUri).build());
                syncResult.stats.numDeletes++;
            }
        }
        c.close();

        // Add new items
        for (FeedParser.Entry e : entryMap.values()) {
            Log.i(TAG, "Scheduling insert: entry_id=" + e.id);
            batch.add(ContentProviderOperation.newInsert(FeedContract.Entry.CONTENT_URI)
                    .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, e.id)
                    .withValue(FeedContract.Entry.COLUMN_NAME_TITLE, e.title)
                    .withValue(FeedContract.Entry.COLUMN_NAME_LINK, e.link)
                    .withValue(FeedContract.Entry.COLUMN_NAME_PUBLISHED, e.published)
                    .build());
            syncResult.stats.numInserts++;
        }
        Log.i(TAG, "Merge solution ready. Applying batch update");
        mContentResolver.applyBatch(FeedContract.CONTENT_AUTHORITY, batch);
        mContentResolver.notifyChange(
                FeedContract.Entry.CONTENT_URI, // URI where data was modified
                null,                           // No local observer
                false);                         // IMPORTANT: Do not sync to network
        // This sample doesn't support uploads, but if *your* code does, make sure you set
        // syncToNetwork=false in the line above to prevent duplicate syncs.
    }
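    /*
     * Illustrative note, not part of the original sample: the notifyChange() call at the end of
     * updateLocalFeedData() wakes up any ContentObserver registered on
     * FeedContract.Entry.CONTENT_URI and, provided the content provider sets that URI as the
     * notification URI on the cursors it returns, requeries attached CursorLoaders as well. A UI
     * component might load this data roughly as follows (sketch; the loader ID and sort order
     * are hypothetical):
     *
     *     // In a Fragment implementing LoaderManager.LoaderCallbacks<Cursor>:
     *     getLoaderManager().initLoader(0, null, this);
     *
     *     @Override
     *     public Loader<Cursor> onCreateLoader(int id, Bundle args) {
     *         return new CursorLoader(getActivity(), FeedContract.Entry.CONTENT_URI,
     *                 PROJECTION, null, null,
     *                 FeedContract.Entry.COLUMN_NAME_PUBLISHED + " desc");
     *     }
     */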
    /**
     * Given a feed URL, sets up a connection and gets an input stream.
     */
    private InputStream downloadUrl(final URL url) throws IOException {
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setReadTimeout(NET_READ_TIMEOUT_MILLIS /* milliseconds */);
        conn.setConnectTimeout(NET_CONNECT_TIMEOUT_MILLIS /* milliseconds */);
        conn.setRequestMethod("GET");
        conn.setDoInput(true);
        // Starts the query
        conn.connect();
        return conn.getInputStream();
    }
}
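/*
 * For reference (sketch only, not part of this file): the SyncService mentioned in the class
 * javadoc typically instantiates SyncAdapter exactly once and hands its binder to the system,
 * roughly as follows.
 *
 *     public class SyncService extends Service {
 *         private static SyncAdapter sSyncAdapter = null;
 *         private static final Object sSyncAdapterLock = new Object();
 *
 *         @Override
 *         public void onCreate() {
 *             synchronized (sSyncAdapterLock) {
 *                 if (sSyncAdapter == null) {
 *                     sSyncAdapter = new SyncAdapter(getApplicationContext(), true);
 *                 }
 *             }
 *         }
 *
 *         @Override
 *         public IBinder onBind(Intent intent) {
 *             // The system uses this IBinder to call onPerformSync() via RPC.
 *             return sSyncAdapter.getSyncAdapterBinder();
 *         }
 *     }
 */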