Commit e527e307 authored by Torsten Grote

Implement background task for fetching RSS feeds

* Implemented in briar-core as a `ScheduledExecutorService`
  that gets started when the app starts
* The briar-api has a `FeedManager` interface
  that the UI can use to register and unregister feeds
* In this first iteration, feeds are fetched via HTTP(S), not Tor

Closes #484
parent 4af5dbb4
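
For context, a minimal sketch of how a UI component might drive the new FeedManager interface to register and unregister feeds. The RssFeedController class, its wiring, and the source of the blog's GroupId are hypothetical; only the FeedManager methods are the ones added in this commit.

import org.briarproject.api.db.DbException;
import org.briarproject.api.feed.Feed;
import org.briarproject.api.feed.FeedManager;
import org.briarproject.api.sync.GroupId;

import java.io.IOException;
import java.util.List;

class RssFeedController {

    private final FeedManager feedManager;

    RssFeedController(FeedManager feedManager) {
        this.feedManager = feedManager;
    }

    // Validate and register a feed; the blog's GroupId is assumed to come
    // from the existing blog setup. The first fetch happens here, later
    // fetches run on the background ScheduledExecutorService.
    void subscribe(String url, GroupId blogGroupId)
            throws DbException, IOException {
        feedManager.addFeed(url, blogGroupId);
    }

    // Unregister a feed by its URL
    void unsubscribe(String url) throws DbException {
        feedManager.removeFeed(url);
    }

    // List all registered feeds, e.g. for display in the UI
    List<Feed> listFeeds() throws DbException {
        return feedManager.getFeeds();
    }
}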
......@@ -14,6 +14,7 @@ dependencies {
// This shouldn't be necessary; per section 23.4.4 of the Gradle docs:
// "file dependencies are included in transitive project dependencies within the same build".
compile files('../briar-core/libs/jsocks.jar')
compile "com.android.support:support-v4:$supportVersion"
compile("com.android.support:appcompat-v7:$supportVersion") {
exclude module: 'support-v4'
......
......@@ -59,4 +59,14 @@
-dontnote android.support.**
-dontnote dagger.**
-dontwarn dagger.**
-dontwarn com.google.common.**
# RSS libraries
-keep class com.rometools.rome.feed.synd.impl.** { *;}
-keep class com.rometools.rome.io.impl.** { *;}
-dontwarn javax.xml.stream.**
-dontwarn org.jaxen.**
-dontwarn java.nio.**
-dontwarn org.codehaus.mojo.animal_sniffer.**
-dontwarn org.slf4j.impl.**
package org.briarproject.api.feed;
import org.briarproject.api.FormatException;
import org.briarproject.api.data.BdfDictionary;
import org.briarproject.api.data.BdfEntry;
import org.briarproject.api.sync.GroupId;
import org.jetbrains.annotations.Nullable;
import static org.briarproject.api.feed.FeedConstants.KEY_BLOG_GROUP_ID;
import static org.briarproject.api.feed.FeedConstants.KEY_FEED_ADDED;
import static org.briarproject.api.feed.FeedConstants.KEY_FEED_AUTHOR;
import static org.briarproject.api.feed.FeedConstants.KEY_FEED_DESC;
import static org.briarproject.api.feed.FeedConstants.KEY_FEED_TITLE;
import static org.briarproject.api.feed.FeedConstants.KEY_FEED_UPDATED;
import static org.briarproject.api.feed.FeedConstants.KEY_FEED_URL;
public class Feed {
private final String url;
private final GroupId blogId;
private final String title, description, author;
private final long added, updated;
public Feed(String url, GroupId blogId, @Nullable String title,
@Nullable String description, @Nullable String author,
long added, long updated) {
this.url = url;
this.blogId = blogId;
this.title = title;
this.description = description;
this.author = author;
this.added = added;
this.updated = updated;
}
public String getUrl() {
return url;
}
public GroupId getBlogId() {
return blogId;
}
public BdfDictionary toBdfDictionary() {
BdfDictionary d = BdfDictionary.of(
new BdfEntry(KEY_FEED_URL, url),
new BdfEntry(KEY_BLOG_GROUP_ID, blogId.getBytes()),
new BdfEntry(KEY_FEED_ADDED, added),
new BdfEntry(KEY_FEED_UPDATED, updated)
);
if (title != null) d.put(KEY_FEED_TITLE, title);
if (description != null) d.put(KEY_FEED_DESC, description);
if (author != null) d.put(KEY_FEED_AUTHOR, author);
return d;
}
public static Feed from(BdfDictionary d) throws FormatException {
String url = d.getString(KEY_FEED_URL);
GroupId blogId = new GroupId(d.getRaw(KEY_BLOG_GROUP_ID));
String title = d.getOptionalString(KEY_FEED_TITLE);
String desc = d.getOptionalString(KEY_FEED_DESC);
String author = d.getOptionalString(KEY_FEED_AUTHOR);
long added = d.getLong(KEY_FEED_ADDED, 0L);
long updated = d.getLong(KEY_FEED_UPDATED, 0L);
return new Feed(url, blogId, title, desc, author, added, updated);
}
public String getTitle() {
return title;
}
public String getDescription() {
return description;
}
public String getAuthor() {
return author;
}
public long getAdded() {
return added;
}
public long getUpdated() {
return updated;
}
}
package org.briarproject.api.feed;
public interface FeedConstants {
/* delay after startup before the first feed fetch, in minutes */
int FETCH_DELAY_INITIAL = 1;
/* interval between feed fetches, in minutes */
int FETCH_INTERVAL = 30;
// group metadata keys
String KEY_FEEDS = "feeds";
String KEY_FEED_URL = "feedURL";
String KEY_BLOG_GROUP_ID = "blogGroupId";
String KEY_FEED_TITLE = "feedTitle";
String KEY_FEED_DESC = "feedDesc";
String KEY_FEED_AUTHOR = "feedAuthor";
String KEY_FEED_ADDED = "feedAdded";
String KEY_FEED_UPDATED = "feedUpdated";
}
package org.briarproject.api.feed;
import org.briarproject.api.db.DbException;
import org.briarproject.api.sync.ClientId;
import org.briarproject.api.sync.GroupId;
import java.io.IOException;
import java.util.List;
public interface FeedManager {
/** Returns the unique ID of the client. */
ClientId getClientId();
/** Adds an RSS feed. */
void addFeed(String url, GroupId g) throws DbException, IOException;
/** Removes an RSS feed. */
void removeFeed(String url) throws DbException;
/** Returns a list of all added RSS feeds. */
List<Feed> getFeeds() throws DbException;
}
......@@ -9,12 +9,22 @@ dependencies {
compile fileTree(dir: 'libs', include: '*.jar')
compile "com.madgag.spongycastle:core:1.54.0.0"
compile "com.h2database:h2:1.4.190"
compile 'com.rometools:rome:1.7.0'
compile 'org.jdom:jdom2:2.0.6'
compile 'org.slf4j:slf4j-api:1.7.21'
compile 'com.squareup.okhttp3:okhttp:3.3.1'
}
dependencyVerification {
verify = [
'com.madgag.spongycastle:core:1e7fa4b19ccccd1011364ab838d0b4702470c178bbbdd94c5c90b2d4d749ea1e',
'com.h2database:h2:23ba495a07bbbb3bd6c3084d10a96dad7a23741b8b6d64b213459a784195a98c',
'com.rometools:rome:3096b7a36c0e54f59b8193c431d28494c6bfa85c72ef3c5f341cdf09eae815e6',
'org.jdom:jdom2:1345f11ba606d15603d6740551a8c21947c0215640770ec67271fe78bea97cf5',
'org.slf4j:slf4j-api:1d5aeb6bd98b0fdd151269eae941c05f6468a791ea0f1e68d8e7fe518af3e7df',
'com.squareup.okhttp3:okhttp:a47f4efa166551cd5acc04f1071d82dafbf05638c21f9ca13068bc6633e3bff6',
'com.rometools:rome-utils:2be18a1edc601c31fe49c2000bb5484dd75182309270c2a2561d71888d81587a',
'com.squareup.okio:okio:5cfea5afe6c6e441a4dbf6053a07a733b1249d1009382eb44ac2255ccedd0c15',
]
}
......
......@@ -4,6 +4,7 @@ import org.briarproject.blogs.BlogsModule;
import org.briarproject.contact.ContactModule;
import org.briarproject.crypto.CryptoModule;
import org.briarproject.db.DatabaseExecutorModule;
import org.briarproject.feed.FeedModule;
import org.briarproject.forum.ForumModule;
import org.briarproject.identity.IdentityModule;
import org.briarproject.introduction.IntroductionModule;
......@@ -47,4 +48,6 @@ public interface CoreEagerSingletons {
void inject(SystemModule.EagerSingletons init);
void inject(TransportModule.EagerSingletons init);
void inject(FeedModule.EagerSingletons init);
}
......@@ -8,6 +8,7 @@ import org.briarproject.data.DataModule;
import org.briarproject.db.DatabaseExecutorModule;
import org.briarproject.db.DatabaseModule;
import org.briarproject.event.EventModule;
import org.briarproject.feed.FeedModule;
import org.briarproject.forum.ForumModule;
import org.briarproject.identity.IdentityModule;
import org.briarproject.introduction.IntroductionModule;
......@@ -51,7 +52,8 @@ import dagger.Module;
SharingModule.class,
SyncModule.class,
SystemModule.class,
TransportModule.class
TransportModule.class,
FeedModule.class
})
public class CoreModule {
......@@ -71,5 +73,6 @@ public class CoreModule {
c.inject(new SystemModule.EagerSingletons());
c.inject(new TransportModule.EagerSingletons());
c.inject(new IntroductionModule.EagerSingletons());
c.inject(new FeedModule.EagerSingletons());
}
}
package org.briarproject.feed;
import com.rometools.rome.feed.synd.SyndFeed;
import com.rometools.rome.io.FeedException;
import com.rometools.rome.io.SyndFeedInput;
import com.rometools.rome.io.XmlReader;
import org.briarproject.api.FormatException;
import org.briarproject.api.blogs.BlogManager;
import org.briarproject.api.clients.Client;
import org.briarproject.api.clients.ClientHelper;
import org.briarproject.api.clients.PrivateGroupFactory;
import org.briarproject.api.data.BdfDictionary;
import org.briarproject.api.data.BdfEntry;
import org.briarproject.api.data.BdfList;
import org.briarproject.api.db.DatabaseComponent;
import org.briarproject.api.db.DbException;
import org.briarproject.api.db.Transaction;
import org.briarproject.api.feed.Feed;
import org.briarproject.api.feed.FeedManager;
import org.briarproject.api.lifecycle.IoExecutor;
import org.briarproject.api.lifecycle.Service;
import org.briarproject.api.lifecycle.ServiceException;
import org.briarproject.api.sync.ClientId;
import org.briarproject.api.sync.Group;
import org.briarproject.api.sync.GroupId;
import org.briarproject.util.StringUtils;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.logging.Logger;
import javax.inject.Inject;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.logging.Level.WARNING;
import static org.briarproject.api.feed.FeedConstants.FETCH_DELAY_INITIAL;
import static org.briarproject.api.feed.FeedConstants.FETCH_INTERVAL;
import static org.briarproject.api.feed.FeedConstants.KEY_FEEDS;
class FeedManagerImpl implements FeedManager, Service, Client {
private static final Logger LOG =
Logger.getLogger(FeedManagerImpl.class.getName());
private static final ClientId CLIENT_ID =
new ClientId(StringUtils.fromHexString(
"466565644d616e6167657202fb797097"
+ "255af837abbf8c16e250b3c2ccc286eb"));
private final ScheduledExecutorService feedExecutor;
private final Executor ioExecutor;
private final DatabaseComponent db;
private final PrivateGroupFactory privateGroupFactory;
private final ClientHelper clientHelper;
private final BlogManager blogManager;
@Inject
FeedManagerImpl(ScheduledExecutorService feedExecutor,
@IoExecutor Executor ioExecutor, DatabaseComponent db,
PrivateGroupFactory privateGroupFactory, ClientHelper clientHelper,
BlogManager blogManager) {
this.feedExecutor = feedExecutor;
this.ioExecutor = ioExecutor;
this.db = db;
this.privateGroupFactory = privateGroupFactory;
this.clientHelper = clientHelper;
this.blogManager = blogManager;
}
@Override
public ClientId getClientId() {
return CLIENT_ID;
}
@Override
public void startService() throws ServiceException {
Runnable fetcher = new Runnable() {
public void run() {
ioExecutor.execute(new Runnable() {
@Override
public void run() {
fetchFeeds();
}
});
}
};
feedExecutor.scheduleWithFixedDelay(fetcher, FETCH_DELAY_INITIAL,
FETCH_INTERVAL, MINUTES);
}
@Override
public void stopService() throws ServiceException {
// feedExecutor will be stopped by LifecycleManager
}
@Override
public void createLocalState(Transaction txn) throws DbException {
Group g = getLocalGroup();
// Return if we've already set the local group up
if (db.containsGroup(txn, g.getId())) return;
// Store the group
db.addGroup(txn, g);
// Add initial metadata
List<Feed> feeds = new ArrayList<Feed>(0);
storeFeeds(txn, feeds);
}
@Override
public void addFeed(String url, GroupId g) throws DbException, IOException {
Feed feed;
try {
SyndFeed f = getSyndFeed(getFeedInputStream(url));
String title = f.getTitle();
String description = f.getDescription();
String author = f.getAuthor();
long added = System.currentTimeMillis();
feed = new Feed(url, g, title, description, author, added, added);
} catch (FeedException e) {
throw new IOException(e);
}
Transaction txn = db.startTransaction(false);
try {
List<Feed> feeds = getFeeds(txn);
feeds.add(feed);
storeFeeds(txn, feeds);
txn.setComplete();
} finally {
db.endTransaction(txn);
}
}
@Override
public void removeFeed(String url) throws DbException {
Transaction txn = db.startTransaction(false);
try {
List<Feed> feeds = getFeeds(txn);
boolean found = false;
// Use an iterator so the feed can be removed while iterating
Iterator<Feed> it = feeds.iterator();
while (it.hasNext()) {
if (it.next().getUrl().equals(url)) {
found = true;
it.remove();
}
}
if (!found) throw new DbException();
storeFeeds(txn, feeds);
txn.setComplete();
} finally {
db.endTransaction(txn);
}
}
@Override
public List<Feed> getFeeds() throws DbException {
List<Feed> feeds;
Transaction txn = db.startTransaction(true);
try {
feeds = getFeeds(txn);
txn.setComplete();
} finally {
db.endTransaction(txn);
}
return feeds;
}
private List<Feed> getFeeds(Transaction txn) throws DbException {
List<Feed> feeds = new ArrayList<Feed>();
Group g = getLocalGroup();
try {
BdfDictionary d =
clientHelper.getGroupMetadataAsDictionary(txn, g.getId());
for (Object object : d.getList(KEY_FEEDS)) {
if (!(object instanceof BdfDictionary))
throw new FormatException();
feeds.add(Feed.from((BdfDictionary) object));
}
} catch (FormatException e) {
throw new DbException(e);
}
return feeds;
}
private void storeFeeds(Transaction txn, List<Feed> feeds)
throws DbException {
BdfList feedList = new BdfList();
for (Feed feed : feeds) {
feedList.add(feed.toBdfDictionary());
}
BdfDictionary gm = BdfDictionary.of(new BdfEntry(KEY_FEEDS, feedList));
try {
if (txn == null) {
clientHelper.mergeGroupMetadata(getLocalGroup().getId(), gm);
} else {
clientHelper
.mergeGroupMetadata(txn, getLocalGroup().getId(), gm);
}
} catch (FormatException e) {
throw new DbException(e);
}
}
private void storeFeeds(List<Feed> feeds) throws DbException {
storeFeeds(null, feeds);
}
private void fetchFeeds() {
// Get current feeds
List<Feed> feeds;
try {
feeds = getFeeds();
} catch (DbException e) {
if (LOG.isLoggable(WARNING))
LOG.log(WARNING, e.toString(), e);
return;
}
// Fetch and update all feeds
List<Feed> newFeeds = new ArrayList<Feed>(feeds.size());
for (Feed feed : feeds) {
newFeeds.add(fetchFeed(feed));
}
// Store updated feeds
try {
storeFeeds(newFeeds);
} catch (DbException e) {
if (LOG.isLoggable(WARNING))
LOG.log(WARNING, e.toString(), e);
}
}
private Feed fetchFeed(Feed feed) {
LOG.info("Updating RSS feeds...");
String title, description, author;
long updated = System.currentTimeMillis();
try {
SyndFeed f = getSyndFeed(getFeedInputStream(feed.getUrl()));
title = f.getTitle();
description = f.getDescription();
author = f.getAuthor();
// TODO keep track of which entries have been seen (#485)
// TODO Pass any new entries down the pipeline to be posted (#486)
} catch (FeedException e) {
if (LOG.isLoggable(WARNING))
LOG.log(WARNING, e.toString(), e);
return feed;
} catch (IOException e) {
if (LOG.isLoggable(WARNING))
LOG.log(WARNING, e.toString(), e);
return feed;
}
return new Feed(feed.getUrl(), feed.getBlogId(), title, description,
author, feed.getAdded(), updated);
}
private InputStream getFeedInputStream(String url) throws IOException {
// Set proxy
// TODO verify and use local Tor proxy address/port
String proxyHost = "localhost";
int proxyPort = 59050;
Proxy proxy = new Proxy(Proxy.Type.HTTP,
new InetSocketAddress(proxyHost, proxyPort));
// Build HTTP Client
OkHttpClient client = new OkHttpClient.Builder()
// .proxy(proxy)
.build();
// Build Request
Request request = new Request.Builder()
.url(url)
.build();
// Execute Request
Response response = client.newCall(request).execute();
if (!response.isSuccessful())
throw new IOException("Error fetching feed: HTTP " + response.code());
return response.body().byteStream();
}
private SyndFeed getSyndFeed(InputStream stream)
throws IOException, FeedException {
SyndFeedInput input = new SyndFeedInput();
return input.build(new XmlReader(stream));
}
private Group getLocalGroup() {
return privateGroupFactory.createLocalGroup(getClientId());
}
}
package org.briarproject.feed;
import org.briarproject.api.feed.FeedManager;
import org.briarproject.api.lifecycle.LifecycleManager;
import javax.inject.Inject;
import javax.inject.Singleton;
import dagger.Module;
import dagger.Provides;
@Module
public class FeedModule {
public static class EagerSingletons {
@Inject
FeedManager feedManager;
}
@Provides
@Singleton
FeedManager provideFeedManager(FeedManagerImpl feedManager,
LifecycleManager lifecycleManager) {
lifecycleManager.registerClient(feedManager);
lifecycleManager.registerService(feedManager);
return feedManager;
}
}
......@@ -138,6 +138,7 @@ class PluginManagerImpl implements PluginManager, Service {
}
// Wait for all the plugins to stop
try {
LOG.info("Waiting for all the plugins to stop");
stopLatch.await();
} catch (InterruptedException e) {
throw new ServiceException(e);
......@@ -227,6 +228,8 @@ class PluginManagerImpl implements PluginManager, Service {
@Override
public void run() {
if (LOG.isLoggable(INFO))
LOG.info("Trying to stop plugin " + plugin.getId());
try {
// Wait for the plugin to finish starting
startLatch.await();
......