Mirror of https://github.com/moonlight-stream/moonlight-android.git (synced 2025-07-19 11:03:01 +00:00)

Commit 99e3b5f33b (parent afbe64f3ff)

Rewrite a large portion of the computer manager service to fix some thread leaks and improve performance
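For context on the "thread leaks" mentioned above: the old code (removed in the diff below) tracked polling threads in a HashMap<ComputerDetails, Thread> and tried to swap each entry over to its updated key after every poll. Keying a HashMap by an object whose fields change over time makes entries easy to strand, which is one plausible way thread references could leak here. The snippet below is a minimal, self-contained sketch of that hazard; MutableKeyLeakDemo, MutableKey, and everything else in it are hypothetical stand-ins, not Moonlight code.

import java.util.HashMap;
import java.util.Objects;

public class MutableKeyLeakDemo {
    // Hypothetical stand-in for a details object whose equals()/hashCode() depend on mutable fields
    static class MutableKey {
        String address;

        MutableKey(String address) {
            this.address = address;
        }

        @Override
        public boolean equals(Object o) {
            return o instanceof MutableKey && Objects.equals(address, ((MutableKey) o).address);
        }

        @Override
        public int hashCode() {
            return Objects.hashCode(address);
        }
    }

    public static void main(String[] args) {
        HashMap<MutableKey, Thread> threads = new HashMap<MutableKey, Thread>();
        MutableKey key = new MutableKey("192.168.1.2");
        threads.put(key, new Thread(() -> { /* poll loop */ }));

        // A poll later updates the key's fields in place while it is still in the map...
        key.address = "10.0.0.5";

        // ...so removal by the original value no longer finds the entry, and the entry
        // (with the Thread it references) is stranded where nothing can interrupt it.
        System.out.println(threads.remove(new MutableKey("192.168.1.2"))); // null
        System.out.println(threads.size());                                // 1
    }
}

Once an entry is stranded like this, nothing can remove it or interrupt the thread it references, which is the kind of bookkeeping the diff below replaces with an explicit list of PollingTuple objects.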
Changed file: com/limelight/computers/ComputerManagerService.java

@@ -1,8 +1,7 @@
 package com.limelight.computers;

 import java.net.InetAddress;
-import java.util.HashMap;
-import java.util.List;
+import java.util.LinkedList;
 import java.util.concurrent.atomic.AtomicInteger;

 import com.limelight.LimeLog;
@@ -30,7 +29,7 @@ public class ComputerManagerService extends Service {
     private AtomicInteger dbRefCount = new AtomicInteger(0);

     private IdentityManager idManager;
-    private final HashMap<ComputerDetails, Thread> pollingThreads = new HashMap<ComputerDetails, Thread>();
+    private final LinkedList<PollingTuple> pollingTuples = new LinkedList<PollingTuple>();
     private ComputerManagerListener listener = null;
     private AtomicInteger activePolls = new AtomicInteger(0);

@@ -102,20 +101,8 @@ public class ComputerManagerService extends Service {
            @Override
            public void run() {
                while (!isInterrupted()) {
-                    ComputerDetails originalDetails = new ComputerDetails();
-                    originalDetails.update(details);
-
                    // Check if this poll has modified the details
-                    if (runPoll(details) && !originalDetails.equals(details)) {
-                        // Replace our thread entry with the new one
-                        synchronized (pollingThreads) {
-                            if (pollingThreads.remove(originalDetails) != null) {
-                                // This could have gone away in the meantime, so don't
-                                // add it back if it has
-                                pollingThreads.put(details, this);
-                            }
-                        }
-                    }
+                    runPoll(details);

                    // Wait until the next polling interval
                    try {
@@ -138,23 +125,15 @@ public class ComputerManagerService extends Service {
            // Start mDNS autodiscovery too
            discoveryBinder.startDiscovery(MDNS_QUERY_PERIOD_MS);

-            // Start polling known machines
-            if (!getLocalDatabaseReference()) {
-                return;
-            }
-            List<ComputerDetails> computerList = dbManager.getAllComputers();
-            releaseLocalDatabaseReference();
-
-            synchronized (pollingThreads) {
-                for (ComputerDetails computer : computerList) {
+            synchronized (pollingTuples) {
+                for (PollingTuple tuple : pollingTuples) {
                    // This polling thread might already be there
-                    if (!pollingThreads.containsKey(computer)) {
+                    if (tuple.thread == null) {
                        // Report this computer initially
-                        listener.notifyComputerUpdated(computer);
+                        listener.notifyComputerUpdated(tuple.computer);

-                        Thread t = createPollingThread(computer);
-                        pollingThreads.put(computer, t);
-                        t.start();
+                        tuple.thread = createPollingThread(tuple.computer);
+                        tuple.thread.start();
                    }
                }
            }
@@ -208,11 +187,14 @@ public class ComputerManagerService extends Service {
            discoveryBinder.stopDiscovery();

            // Stop polling
-            synchronized (pollingThreads) {
-                for (Thread t : pollingThreads.values()) {
-                    t.interrupt();
+            synchronized (pollingTuples) {
+                for (PollingTuple tuple : pollingTuples) {
+                    if (tuple.thread != null) {
+                        // Interrupt and remove the thread
+                        tuple.thread.interrupt();
+                        tuple.thread = null;
+                    }
                }
-                pollingThreads.clear();
            }

            // Remove the listener
@@ -249,13 +231,26 @@ public class ComputerManagerService extends Service {
            fakeDetails.remoteIp = addr;

            // Spawn a thread for this computer
-            synchronized (pollingThreads) {
+            synchronized (pollingTuples) {
                // This polling thread might already be there
-                if (!pollingThreads.containsKey(fakeDetails)) {
-                    Thread t = createPollingThread(fakeDetails);
-                    pollingThreads.put(fakeDetails, t);
-                    t.start();
+                for (PollingTuple tuple : pollingTuples) {
+                    if (tuple.computer.localIp.equals(addr) ||
+                        tuple.computer.remoteIp.equals(addr)) {
+                        // This is the same computer
+                        if (tuple.thread == null) {
+                            tuple.thread = createPollingThread(fakeDetails);
+                            tuple.thread.start();
+                        }
+
+                        // Found an entry so we're done
+                        return;
+                    }
                }
+
+                // If we got here, we didn't find an entry
+                PollingTuple tuple = new PollingTuple(fakeDetails, createPollingThread(fakeDetails));
+                pollingTuples.add(tuple);
+                tuple.thread.start();
            }
        }

@@ -280,6 +275,20 @@ public class ComputerManagerService extends Service {
            // Remove it from the database
            dbManager.deleteComputer(name);

+            synchronized (pollingTuples) {
+                // Remove the computer from the computer list
+                for (PollingTuple tuple : pollingTuples) {
+                    if (tuple.computer.name.equals(name)) {
+                        if (tuple.thread != null) {
+                            // Interrupt the thread on this entry
+                            tuple.thread.interrupt();
+                        }
+                        pollingTuples.remove(tuple);
+                        break;
+                    }
+                }
+            }
+
            releaseLocalDatabaseReference();
        }

@@ -376,6 +385,18 @@ public class ComputerManagerService extends Service {
        // Initialize the DB
        dbManager = new ComputerDatabaseManager(this);
        dbRefCount.set(1);
+
+        // Grab known machines into our computer list
+        if (!getLocalDatabaseReference()) {
+            return;
+        }
+
+        for (ComputerDetails computer : dbManager.getAllComputers()) {
+            // Add this computer without a thread
+            pollingTuples.add(new PollingTuple(computer, null));
+        }
+
+        releaseLocalDatabaseReference();
    }

    @Override
@@ -396,3 +417,13 @@ public class ComputerManagerService extends Service {
        return binder;
    }
}
+
+class PollingTuple {
+    public Thread thread;
+    public ComputerDetails computer;
+
+    public PollingTuple(ComputerDetails computer, Thread thread) {
+        this.computer = computer;
+        this.thread = thread;
+    }
+}
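Taken together, the hunks above replace the map-based bookkeeping with a LinkedList<PollingTuple> whose thread field is created lazily, interrupted on stop, and set back to null so the computer entry itself survives for the next start. Below is a condensed, standalone sketch of that lifecycle under the same locking pattern; apart from the tuple's shape (a computer plus a nullable Thread), all names here (Computer, Tuple, load/start/stop/poll) are hypothetical and exist only for illustration.

import java.util.LinkedList;

class Computer {
    final String name;

    Computer(String name) {
        this.name = name;
    }
}

// Same shape as the PollingTuple added by this commit, but over a stand-in Computer type
class Tuple {
    Thread thread;           // null while the computer is known but not currently being polled
    final Computer computer;

    Tuple(Computer computer, Thread thread) {
        this.computer = computer;
        this.thread = thread;
    }
}

public class PollingLifecycleSketch {
    private final LinkedList<Tuple> tuples = new LinkedList<Tuple>();

    // onCreate-style step: remember known computers without starting any threads yet
    void load(Iterable<Computer> known) {
        synchronized (tuples) {
            for (Computer c : known) {
                tuples.add(new Tuple(c, null));
            }
        }
    }

    // startPolling-style step: create a thread only for entries that don't already have one
    void start() {
        synchronized (tuples) {
            for (Tuple t : tuples) {
                if (t.thread == null) {
                    t.thread = new Thread(() -> poll(t.computer));
                    t.thread.start();
                }
            }
        }
    }

    // stopPolling-style step: interrupt and drop the threads, but keep the computer entries
    void stop() {
        synchronized (tuples) {
            for (Tuple t : tuples) {
                if (t.thread != null) {
                    t.thread.interrupt();
                    t.thread = null;
                }
            }
        }
    }

    private void poll(Computer c) {
        while (!Thread.currentThread().isInterrupted()) {
            // ... poll c here, then wait until the next interval ...
            try {
                Thread.sleep(5000);
            } catch (InterruptedException e) {
                return;
            }
        }
    }
}

The key design point is that stopping polling clears only the thread references, not the list entries, so a later start can reuse the same computers without rereading the database.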