switch to Handler for encodingStorage

This commit is contained in:
Alexey Kuznetsov 2020-11-03 19:58:16 +03:00
commit feb06d5f20
7 changed files with 650 additions and 586 deletions

View file

@ -2,16 +2,16 @@ package com.github.axet.audiorecorder.activities;
import android.app.KeyguardManager;
import android.app.ProgressDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.preference.PreferenceManager;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
@ -42,6 +42,7 @@ import com.github.axet.androidlibrary.widgets.SearchView;
import com.github.axet.audiolibrary.app.RawSamples;
import com.github.axet.audiorecorder.R;
import com.github.axet.audiorecorder.app.AudioApplication;
import com.github.axet.audiorecorder.app.EncodingStorage;
import com.github.axet.audiorecorder.app.Recordings;
import com.github.axet.audiorecorder.app.Storage;
import com.github.axet.audiorecorder.services.EncodingService;
@ -178,35 +179,39 @@ public class MainActivity extends AppCompatThemeActivity {
}
}
public class EncodingDialog extends BroadcastReceiver {
public class EncodingDialog extends Handler {
Context context;
Snackbar snackbar;
IntentFilter filter = new IntentFilter();
ProgressEncoding d;
long cur;
long total;
Storage storage;
EncodingStorage encodings;
public EncodingDialog() {
filter.addAction(EncodingService.UPDATE_ENCODING);
filter.addAction(EncodingService.ERROR);
}
public void registerReceiver(Context context) {
this.context = context;
context.registerReceiver(this, filter);
storage = new Storage(context);
encodings = ((AudioApplication) context.getApplicationContext()).encodings;
synchronized (encodings.handlers) {
encodings.handlers.add(this);
}
}
public void close() {
context.unregisterReceiver(this);
synchronized (encodings.handlers) {
encodings.handlers.remove(this);
}
}
public String printEncodings(Uri targetUri) {
final long progress = cur * 100 / total;
String p = " (" + progress + "%)";
String str = "";
EncodingService.EncodingStorage storage = new EncodingService.EncodingStorage(new Storage(context));
for (File f : storage.keySet()) {
EncodingService.EncodingStorage.Info n = storage.get(f);
for (File f : encodings.keySet()) {
EncodingStorage.Info n = encodings.get(f);
String name = Storage.getName(context, n.targetUri);
str += "- " + name;
if (n.targetUri.equals(targetUri))
@ -218,11 +223,10 @@ public class MainActivity extends AppCompatThemeActivity {
}
@Override
public void onReceive(final Context context, Intent intent) {
String a = intent.getAction();
if (a == null)
return;
if (a.equals(EncodingService.UPDATE_ENCODING)) {
public void handleMessage(Message msg) {
if (msg.what == EncodingStorage.UPDATE) {
encodings.load();
Intent intent = (Intent) msg.obj;
cur = intent.getLongExtra("cur", -1);
total = intent.getLongExtra("total", -1);
final Uri targetUri = intent.getParcelableExtra("targetUri");
@ -256,7 +260,8 @@ public class MainActivity extends AppCompatThemeActivity {
snackbar.show();
}
}
if (a.equals(EncodingService.DONE_ENCODING)) {
if (msg.what == EncodingStorage.DONE) {
Intent intent = (Intent) msg.obj;
if (d != null) {
d.dismiss();
d = null;
@ -269,7 +274,8 @@ public class MainActivity extends AppCompatThemeActivity {
snackbar.show();
}
}
if (a.equals(EncodingService.ERROR)) {
if (msg.what == EncodingStorage.ERROR) {
Intent intent = (Intent) msg.obj;
if (d != null) {
d.dismiss();
d = null;
@ -294,6 +300,9 @@ public class MainActivity extends AppCompatThemeActivity {
public void onResume() {
if (d != null)
d.onResume(cur);
encodings.load();
if (encodings.isEmpty())
hide();
}
public void Error(final File in, final RawSamples.Info info, Throwable e) {
@ -325,6 +334,13 @@ public class MainActivity extends AppCompatThemeActivity {
}
builder.show();
}
public void hide() {
if (snackbar != null) {
snackbar.dismiss();
snackbar = null;
}
}
}
@Override

View file

@ -50,6 +50,7 @@ import com.github.axet.audiolibrary.widgets.PitchView;
import com.github.axet.audiorecorder.BuildConfig;
import com.github.axet.audiorecorder.R;
import com.github.axet.audiorecorder.app.AudioApplication;
import com.github.axet.audiorecorder.app.RecordingStorage;
import com.github.axet.audiorecorder.app.Storage;
import com.github.axet.audiorecorder.services.BluetoothReceiver;
import com.github.axet.audiorecorder.services.EncodingService;
@ -95,7 +96,7 @@ public class RecordingActivity extends AppCompatThemeActivity {
ScreenReceiver screen;
AudioApplication.RecordingStorage recording;
RecordingStorage recording;
RecordingReceiver receiver;
@ -103,31 +104,31 @@ public class RecordingActivity extends AppCompatThemeActivity {
Handler handler = new Handler() {
@Override
public void handleMessage(Message msg) {
if (msg.what == AudioApplication.RecordingStorage.PINCH)
if (msg.what == RecordingStorage.PINCH)
pitch.add((Double) msg.obj);
if (msg.what == AudioApplication.RecordingStorage.UPDATESAMPLES)
if (msg.what == RecordingStorage.UPDATESAMPLES)
updateSamples((Long) msg.obj);
if (msg.what == AudioApplication.RecordingStorage.PAUSED) {
if (msg.what == RecordingStorage.PAUSED) {
muted = RecordingActivity.startActivity(RecordingActivity.this, "Error", getString(R.string.mic_paused));
if (muted != null) {
AutoClose ac = new AutoClose(muted, 10);
ac.run();
}
}
if (msg.what == AudioApplication.RecordingStorage.MUTED) {
if (msg.what == RecordingStorage.MUTED) {
if (Build.VERSION.SDK_INT >= 28)
muted = RecordingActivity.startActivity(RecordingActivity.this, getString(R.string.mic_muted_error), getString(R.string.mic_muted_pie));
else
muted = RecordingActivity.startActivity(RecordingActivity.this, "Error", getString(R.string.mic_muted_error));
}
if (msg.what == AudioApplication.RecordingStorage.UNMUTED) {
if (msg.what == RecordingStorage.UNMUTED) {
if (muted != null) {
AutoClose run = new AutoClose(muted);
run.run();
muted = null;
}
}
if (msg.what == AudioApplication.RecordingStorage.END) {
if (msg.what == RecordingStorage.END) {
pitch.drawEnd();
if (!recording.interrupt.get()) {
stopRecording(getString(R.string.recording_status_pause));
@ -138,7 +139,7 @@ public class RecordingActivity extends AppCompatThemeActivity {
muted = RecordingActivity.startActivity(RecordingActivity.this, getString(R.string.mic_muted_error), text);
}
}
if (msg.what == AudioApplication.RecordingStorage.ERROR)
if (msg.what == RecordingStorage.ERROR)
Error((Throwable) msg.obj);
}
};
@ -509,7 +510,7 @@ public class RecordingActivity extends AppCompatThemeActivity {
editor.commit();
}
Log.d(TAG, "create recording at: " + targetUri);
app.recording = new AudioApplication.RecordingStorage(this, pitch.getPitchTime(), targetUri);
app.recording = new RecordingStorage(this, pitch.getPitchTime(), targetUri);
}
recording = app.recording;
synchronized (recording.handlers) {

View file

@ -5,13 +5,7 @@ import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.net.Uri;
import android.os.Build;
import android.os.Handler;
import android.os.PowerManager;
import android.os.Process;
import android.support.v7.preference.PreferenceManager;
import android.util.Log;
import android.view.View;
@ -19,21 +13,13 @@ import android.view.View;
import com.github.axet.androidlibrary.app.NotificationManagerCompat;
import com.github.axet.androidlibrary.widgets.NotificationChannelCompat;
import com.github.axet.androidlibrary.widgets.RemoteNotificationCompat;
import com.github.axet.audiolibrary.app.RawSamples;
import com.github.axet.audiolibrary.app.Sound;
import com.github.axet.audiolibrary.encoders.Encoder;
import com.github.axet.audiolibrary.encoders.FormatFLAC;
import com.github.axet.audiolibrary.encoders.FormatM4A;
import com.github.axet.audiolibrary.encoders.FormatOGG;
import com.github.axet.audiolibrary.encoders.OnFlyEncoding;
import com.github.axet.audiorecorder.BuildConfig;
import com.github.axet.audiorecorder.R;
import com.github.axet.audiorecorder.activities.MainActivity;
import java.nio.ShortBuffer;
import java.util.ArrayList;
import java.util.Locale;
import java.util.concurrent.atomic.AtomicBoolean;
public class AudioApplication extends com.github.axet.audiolibrary.app.MainApplication {
public static final String PREFERENCE_CONTROLS = "controls";
@ -47,321 +33,19 @@ public class AudioApplication extends com.github.axet.audiolibrary.app.MainAppli
public NotificationChannelCompat channelStatus;
public RecordingStorage recording;
public EncodingStorage encodings;
public static AudioApplication from(Context context) {
return (AudioApplication) com.github.axet.audiolibrary.app.MainApplication.from(context);
}
public static class RecordingStorage {
public static final int PINCH = 1;
public static final int UPDATESAMPLES = 2;
public static final int END = 3;
public static final int ERROR = 4;
public static final int MUTED = 5;
public static final int UNMUTED = 6;
public static final int PAUSED = 7;
public Context context;
public final ArrayList<Handler> handlers = new ArrayList<>();
public Sound sound;
public Storage storage;
public Encoder e;
public AtomicBoolean interrupt = new AtomicBoolean(); // nio throws ClosedByInterruptException if thread interrupted
public Thread thread;
public final Object bufferSizeLock = new Object(); // lock for bufferSize
public int bufferSize; // dynamic buffer size. big for backgound recording. small for realtime view updates.
public int sampleRate; // variable from settings. how may samples per second.
public int samplesUpdate; // pitch size in samples. how many samples count need to update view. 4410 for 100ms update.
public int samplesUpdateStereo; // samplesUpdate * number of channels
public Uri targetUri = null; // output target file 2016-01-01 01.01.01.wav
public long samplesTime; // how many samples passed for current recording, stereo = samplesTime * 2
public ShortBuffer dbBuffer = null; // PinchView samples buffer
public int pitchTime; // screen width
public RecordingStorage(Context context, int pitchTime, Uri targetUri) {
this.context = context;
this.pitchTime = pitchTime;
this.targetUri = targetUri;
storage = new Storage(context);
sound = new Sound(context);
sampleRate = Sound.getSampleRate(context);
samplesUpdate = (int) (pitchTime * sampleRate / 1000f);
samplesUpdateStereo = samplesUpdate * Sound.getChannels(context);
}
public void startRecording() {
sound.silent();
final SharedPreferences shared = android.preference.PreferenceManager.getDefaultSharedPreferences(context);
int user;
if (shared.getString(AudioApplication.PREFERENCE_SOURCE, context.getString(R.string.source_mic)).equals(context.getString(R.string.source_raw))) {
if (Sound.isUnprocessedSupported(context))
user = MediaRecorder.AudioSource.UNPROCESSED;
else
user = MediaRecorder.AudioSource.VOICE_RECOGNITION;
} else {
user = MediaRecorder.AudioSource.MIC;
}
int[] ss = new int[]{
user,
MediaRecorder.AudioSource.MIC,
MediaRecorder.AudioSource.DEFAULT
};
if (shared.getBoolean(AudioApplication.PREFERENCE_FLY, false)) {
final OnFlyEncoding fly = new OnFlyEncoding(storage, targetUri, getInfo());
if (e == null) { // do not recreate encoder if on-fly mode enabled
e = new Encoder() {
@Override
public void encode(short[] buf, int pos, int len) {
fly.encode(buf, pos, len);
}
@Override
public void close() {
fly.close();
}
};
}
} else {
final RawSamples rs = new RawSamples(storage.getTempRecording());
rs.open(samplesTime * Sound.getChannels(context));
e = new Encoder() {
@Override
public void encode(short[] buf, int pos, int len) {
rs.write(buf, pos, len);
}
@Override
public void close() {
rs.close();
}
};
}
final AudioRecord recorder = Sound.createAudioRecorder(context, sampleRate, ss, 0);
final Thread old = thread;
final AtomicBoolean oldb = interrupt;
interrupt = new AtomicBoolean(false);
thread = new Thread("RecordingThread") {
@Override
public void run() {
if (old != null) {
oldb.set(true);
old.interrupt();
try {
old.join();
} catch (InterruptedException e) {
return;
}
}
PowerManager pm = (PowerManager) context.getSystemService(Context.POWER_SERVICE);
PowerManager.WakeLock wlcpu = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, BuildConfig.APPLICATION_ID + ":recordinglock");
wlcpu.acquire();
android.os.Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
boolean silenceDetected = false;
long silence = samplesTime; // last non silence frame
long start = System.currentTimeMillis(); // recording start time
long session = 0; // samples count from start of recording
try {
long last = System.currentTimeMillis();
recorder.startRecording();
int samplesTimeCount = 0;
final int samplesTimeUpdate = 1000 * sampleRate / 1000; // how many samples we need to update 'samples'. time clock. every 1000ms.
short[] buffer = null;
boolean stableRefresh = false;
while (!interrupt.get()) {
synchronized (bufferSizeLock) {
if (buffer == null || buffer.length != bufferSize)
buffer = new short[bufferSize];
}
int readSize = recorder.read(buffer, 0, buffer.length);
if (readSize < 0)
return;
long now = System.currentTimeMillis();
long diff = (now - last) * sampleRate / 1000;
last = now;
int samples = readSize / Sound.getChannels(context); // mono samples (for booth channels)
if (stableRefresh || diff >= samples) {
stableRefresh = true;
e.encode(buffer, 0, readSize);
short[] dbBuf;
int dbSize;
int readSizeUpdate;
if (dbBuffer != null) {
ShortBuffer bb = ShortBuffer.allocate(dbBuffer.position() + readSize);
dbBuffer.flip();
bb.put(dbBuffer);
bb.put(buffer, 0, readSize);
dbBuf = new short[bb.position()];
dbSize = dbBuf.length;
bb.flip();
bb.get(dbBuf, 0, dbBuf.length);
} else {
dbBuf = buffer;
dbSize = readSize;
}
readSizeUpdate = dbSize / samplesUpdateStereo * samplesUpdateStereo;
for (int i = 0; i < readSizeUpdate; i += samplesUpdateStereo) {
double a = RawSamples.getAmplitude(dbBuf, i, samplesUpdateStereo);
if (a != 0)
silence = samplesTime + (i + samplesUpdateStereo) / Sound.getChannels(context);
double dB = RawSamples.getDB(a);
Post(PINCH, dB);
}
int readSizeLen = dbSize - readSizeUpdate;
if (readSizeLen > 0) {
dbBuffer = ShortBuffer.allocate(readSizeLen);
dbBuffer.put(dbBuf, readSizeUpdate, readSizeLen);
} else {
dbBuffer = null;
}
samplesTime += samples;
samplesTimeCount += samples;
if (samplesTimeCount > samplesTimeUpdate) {
Post(UPDATESAMPLES, samplesTime);
samplesTimeCount -= samplesTimeUpdate;
}
session += samples;
if (samplesTime - silence > 2 * sampleRate) { // 2 second of mic muted
if (!silenceDetected) {
silenceDetected = true;
Post(MUTED, null);
}
} else {
if (silenceDetected) {
silenceDetected = false;
Post(UNMUTED, null);
}
}
diff = (now - start) * sampleRate / 1000; // number of samples we expect by this moment
if (diff - session > 2 * sampleRate) { // 2 second of silence / paused by os
Post(PAUSED, null);
session = diff; // reset
}
}
}
} catch (final RuntimeException e) {
Post(e);
} finally {
wlcpu.release();
// redraw view, we may add one last pich which is not been drawen because draw tread already interrupted.
// to prevent resume recording jump - draw last added pitch here.
Post(END, null);
if (recorder != null)
recorder.release();
if (!shared.getBoolean(AudioApplication.PREFERENCE_FLY, false)) { // keep encoder open if encoding on fly enabled
try {
if (e != null) {
e.close();
e = null;
}
} catch (RuntimeException e) {
Post(e);
}
}
}
}
};
thread.start();
}
public void stopRecording() {
if (thread != null) {
interrupt.set(true);
try {
thread.join();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
thread = null;
}
sound.unsilent();
}
public RawSamples.Info getInfo() {
return new RawSamples.Info(sampleRate, Sound.getChannels(context));
}
// calcuale buffer length dynamically, this way we can reduce thread cycles when activity in background
// or phone screen is off.
public void updateBufferSize(boolean pause) {
synchronized (bufferSizeLock) {
int samplesUpdate;
if (pause) {
// we need make buffer multiply of pitch.getPitchTime() (100 ms).
// to prevent missing blocks from view otherwise:
// file may contain not multiply 'samplesUpdate' count of samples. it is about 100ms.
// we can't show on pitchView sorter then 100ms samples. we can't add partial sample because on
// resumeRecording we have to apply rest of samplesUpdate or reload all samples again
// from file. better then confusing user we cut them on next resumeRecording.
long l = 1000L / pitchTime * pitchTime;
samplesUpdate = (int) (l * sampleRate / 1000.0);
} else {
samplesUpdate = this.samplesUpdate;
}
bufferSize = samplesUpdate * Sound.getChannels(context);
}
}
public boolean isForeground() {
synchronized (bufferSizeLock) {
return bufferSize == this.samplesUpdate * Sound.getChannels(context);
}
}
public void Post(Throwable e) {
Post(ERROR, e);
}
public void Post(int what, Object p) {
synchronized (handlers) {
for (Handler h : handlers)
h.obtainMessage(what, p).sendToTarget();
}
}
}
@Override
public void onCreate() {
super.onCreate();
Log.d(TAG, "onCreate");
channelStatus = new NotificationChannelCompat(this, "status", "Status", NotificationManagerCompat.IMPORTANCE_LOW);
encodings = new EncodingStorage(this);
switch (getVersion(PREFERENCE_VERSION, R.xml.pref_general)) {
case -1:

View file

@ -0,0 +1,229 @@
package com.github.axet.audiorecorder.app;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.Handler;
import android.util.Log;
import android.util.Xml;
import com.github.axet.audiolibrary.app.RawSamples;
import com.github.axet.audiolibrary.encoders.FileEncoder;
import com.github.axet.audiolibrary.encoders.OnFlyEncoding;
import com.github.axet.audiolibrary.filters.AmplifierFilter;
import com.github.axet.audiolibrary.filters.SkipSilenceFilter;
import com.github.axet.audiolibrary.filters.VoiceFilter;
import org.apache.commons.io.FileUtils;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.io.FilenameFilter;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
/**
 * Disk-backed queue of finished raw recordings awaiting background encoding.
 * <p>
 * Maps each queued temp file to the {@link Info} metadata (target Uri + raw
 * sample format) needed to encode it. Every entry is mirrored on disk as a
 * sidecar ".json" file next to the recording, so the queue survives process
 * restarts; {@link #load()} rebuilds the in-memory map from disk.
 * Progress / completion / errors are delivered to registered {@link Handler}s
 * via {@link #Post(int, Object)} using the UPDATE/DONE/EXIT/ERROR codes.
 */
public class EncodingStorage extends HashMap<File, EncodingStorage.Info> {
    public static final String TAG = EncodingStorage.class.getSimpleName();

    // Handler message 'what' codes.
    public static final int UPDATE = 1; // progress; msg.obj is an Intent with "cur"/"total"/"info"/"targetUri"/"targetFile"
    public static final int DONE = 2;   // one file finished; msg.obj is an Intent with "targetUri"
    public static final int EXIT = 3;   // queue drained, nothing left to encode
    public static final int ERROR = 4;  // encoding failed; msg.obj is an Intent with "in"/"info"/"e"

    public static String JSON_EXT = "json"; // extension of the per-recording sidecar metadata file

    public Storage storage;
    public FileEncoder encoder; // currently running encoder; null when idle
    public final ArrayList<Handler> handlers = new ArrayList<>(); // listeners; guarded by synchronized (handlers)

    /** Returns the sidecar metadata file ("&lt;name&gt;.json") for the given recording file. */
    public static File jsonFile(File f) {
        return new File(f.getParentFile(), Storage.getNameNoExt(f) + "." + JSON_EXT);
    }

    /** Serializable per-recording metadata: final destination plus raw sample format. */
    public static class Info {
        public Uri targetUri;        // final destination of the encoded file
        public RawSamples.Info info; // sample rate / channel layout of the raw data

        public Info() {
        }

        public Info(Uri t, RawSamples.Info i) {
            this.targetUri = t;
            this.info = i;
        }

        public Info(String json) throws JSONException {
            load(new JSONObject(json));
        }

        public Info(JSONObject json) throws JSONException {
            load(json);
        }

        /** Serializes this record to JSON (inverse of {@link #load(JSONObject)}). */
        public JSONObject save() throws JSONException {
            JSONObject json = new JSONObject();
            json.put("targetUri", targetUri.toString());
            json.put("info", info.save());
            return json;
        }

        /** Restores this record from JSON produced by {@link #save()}. */
        public void load(JSONObject json) throws JSONException {
            targetUri = Uri.parse(json.getString("targetUri"));
            info = new RawSamples.Info(json.getJSONObject("info"));
        }
    }

    public EncodingStorage(Context context) {
        storage = new Storage(context);
        load(); // populate from whatever is already queued on disk
    }

    /**
     * Rebuilds the in-memory map from disk: scans the temp-recording directory
     * for queued encoding files and reads each one's JSON sidecar. Entries whose
     * sidecar cannot be parsed are logged and skipped rather than failing the scan.
     */
    public void load() {
        clear();
        File storage = this.storage.getTempRecording().getParentFile();
        File[] ff = storage.listFiles(new FilenameFilter() {
            String start = Storage.getNameNoExt(Storage.TMP_ENC);
            String ext = Storage.getExt(Storage.TMP_ENC);

            @Override
            public boolean accept(File dir, String name) {
                return name.startsWith(start) && name.endsWith("." + ext);
            }
        });
        if (ff == null)
            return; // directory missing or unreadable
        for (File f : ff) {
            File j = jsonFile(f);
            try {
                put(f, new Info(new JSONObject(FileUtils.readFileToString(j, Charset.defaultCharset()))));
            } catch (Exception e) {
                Log.d(TAG, "unable to read json", e);
            }
        }
    }

    /**
     * Enqueues a finished raw recording: moves 'in' into the encoding temp area
     * under the next free name and writes the JSON sidecar describing where and
     * how it must be encoded.
     *
     * @return the recording's new location inside the queue
     * @throws RuntimeException if the sidecar cannot be written
     */
    public File save(File in, Uri targetUri, RawSamples.Info info) {
        File to = storage.getTempEncoding();
        to = Storage.getNextFile(to);
        to = Storage.move(in, to);
        try {
            File j = jsonFile(to);
            Info rec = new Info(targetUri, info);
            JSONObject json = rec.save();
            FileUtils.writeStringToFile(j, json.toString(), Charset.defaultCharset());
            return to;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** Adds the user-enabled audio filters (voice, amplifier, skip-silence) to the encoder. */
    public void filters(FileEncoder encoder, RawSamples.Info info) {
        SharedPreferences shared = android.preference.PreferenceManager.getDefaultSharedPreferences(storage.getContext());
        if (shared.getBoolean(AudioApplication.PREFERENCE_VOICE, false))
            encoder.filters.add(new VoiceFilter(info));
        float amp = shared.getFloat(AudioApplication.PREFERENCE_VOLUME, 1);
        if (amp != 1)
            encoder.filters.add(new AmplifierFilter(amp));
        if (shared.getBoolean(AudioApplication.PREFERENCE_SKIP, false))
            encoder.filters.add(new SkipSilenceFilter(info));
    }

    /**
     * Starts encoding the next queued file, if any. No-op while an encoder is
     * already running. Files are processed one at a time: each file's success
     * callback recursively calls startEncoding() to drain the queue, and EXIT
     * is posted once the queue is empty.
     */
    public void startEncoding() {
        if (encoder != null)
            return; // already busy
        load();
        for (File in : keySet()) {
            EncodingStorage.Info info = get(in);
            final OnFlyEncoding fly = new OnFlyEncoding(this.storage, info.targetUri, info.info);
            encoder = new FileEncoder(storage.getContext(), in, fly);
            filters(encoder, info.info);
            encoding(encoder, fly, info.info, new Runnable() {
                @Override
                public void run() { // success: release the encoder and continue with the next file
                    encoder.close();
                    encoder = null;
                    startEncoding();
                }
            });
            return; // only encode one file at a time
        }
        Post(EXIT, null);
    }

    /**
     * Runs the given encoder, wiring its progress / success / error callbacks to
     * handler messages. On success the raw input and its sidecar are deleted and
     * 'done' (if any) is invoked; on error the partially written target is deleted
     * and ERROR is posted with the failure details.
     */
    public void encoding(final FileEncoder encoder, final OnFlyEncoding fly, final RawSamples.Info info, final Runnable done) {
        encoder.run(new Runnable() {
            @Override
            public void run() { // progress
                try {
                    long cur = encoder.getCurrent();
                    long total = encoder.getTotal();
                    Intent intent = new Intent()
                            .putExtra("cur", cur)
                            .putExtra("total", total)
                            .putExtra("info", info.save().toString())
                            .putExtra("targetUri", fly.targetUri)
                            .putExtra("targetFile", Storage.getName(storage.getContext(), fly.targetUri));
                    Post(UPDATE, intent);
                } catch (JSONException e) {
                    throw new RuntimeException(e);
                }
            }
        }, new Runnable() {
            @Override
            public void run() { // success
                Storage.delete(encoder.in); // delete raw recording
                Storage.delete(EncodingStorage.jsonFile(encoder.in)); // delete json file
                Post(DONE, new Intent()
                        .putExtra("targetUri", fly.targetUri)
                );
                if (done != null)
                    done.run();
            }
        }, new Runnable() {
            @Override
            public void run() { // or error
                Storage.delete(storage.getContext(), fly.targetUri); // fly has fd, delete target manually
                try {
                    Intent intent = new Intent()
                            .putExtra("in", encoder.in)
                            .putExtra("info", info.save().toString())
                            .putExtra("e", encoder.getException());
                    Post(ERROR, intent);
                } catch (JSONException e) {
                    throw new RuntimeException(e);
                }
            }
        });
    }

    /**
     * Encodes a single file to the given target immediately, bypassing the queue.
     * No completion callback is installed, so the raw file/sidecar cleanup in the
     * success path still runs but the queue is not drained afterwards.
     */
    public void encoding(File in, Uri targetUri, RawSamples.Info info) {
        OnFlyEncoding fly = new OnFlyEncoding(storage, targetUri, info);
        encoder = new FileEncoder(storage.getContext(), in, fly);
        filters(encoder, info);
        encoding(encoder, fly, info, null);
    }

    /**
     * Encodes 'in' straight to the given output file. Note no filters are applied,
     * unlike the other encoding entry points.
     * NOTE(review): passes a File where other call sites pass a Uri — relies on an
     * OnFlyEncoding(Storage, File, Info) overload; confirm it exists.
     */
    public void saveAsWAV(File in, File out, RawSamples.Info info) {
        OnFlyEncoding fly = new OnFlyEncoding(storage, out, info);
        encoder = new FileEncoder(storage.getContext(), in, fly);
        encoding(encoder, fly, info, null);
    }

    /** Aborts any in-flight encoding and restarts draining the queue from scratch. */
    public void restart() {
        if (encoder != null) {
            encoder.close();
            encoder = null;
        }
        startEncoding();
    }

    /** Delivers a message (what + payload) to every registered handler. */
    public void Post(int what, Object p) {
        synchronized (handlers) {
            for (Handler h : handlers)
                h.obtainMessage(what, p).sendToTarget();
        }
    }
}

View file

@ -0,0 +1,325 @@
package com.github.axet.audiorecorder.app;
import android.content.Context;
import android.content.SharedPreferences;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.net.Uri;
import android.os.Handler;
import android.os.PowerManager;
import android.os.Process;
import com.github.axet.audiolibrary.app.RawSamples;
import com.github.axet.audiolibrary.app.Sound;
import com.github.axet.audiolibrary.encoders.Encoder;
import com.github.axet.audiolibrary.encoders.OnFlyEncoding;
import com.github.axet.audiorecorder.BuildConfig;
import com.github.axet.audiorecorder.R;
import java.nio.ShortBuffer;
import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
public class RecordingStorage {
public static final int PINCH = 1;
public static final int UPDATESAMPLES = 2;
public static final int END = 3;
public static final int ERROR = 4;
public static final int MUTED = 5;
public static final int UNMUTED = 6;
public static final int PAUSED = 7;
public Context context;
public final ArrayList<Handler> handlers = new ArrayList<>();
public Sound sound;
public Storage storage;
public Encoder e;
public AtomicBoolean interrupt = new AtomicBoolean(); // nio throws ClosedByInterruptException if thread interrupted
public Thread thread;
public final Object bufferSizeLock = new Object(); // lock for bufferSize
public int bufferSize; // dynamic buffer size. big for backgound recording. small for realtime view updates.
public int sampleRate; // variable from settings. how may samples per second.
public int samplesUpdate; // pitch size in samples. how many samples count need to update view. 4410 for 100ms update.
public int samplesUpdateStereo; // samplesUpdate * number of channels
public Uri targetUri = null; // output target file 2016-01-01 01.01.01.wav
public long samplesTime; // how many samples passed for current recording, stereo = samplesTime * 2
public ShortBuffer dbBuffer = null; // PinchView samples buffer
public int pitchTime; // screen width
public RecordingStorage(Context context, int pitchTime, Uri targetUri) {
this.context = context;
this.pitchTime = pitchTime;
this.targetUri = targetUri;
storage = new Storage(context);
sound = new Sound(context);
sampleRate = Sound.getSampleRate(context);
samplesUpdate = (int) (pitchTime * sampleRate / 1000f);
samplesUpdateStereo = samplesUpdate * Sound.getChannels(context);
}
public void startRecording() {
sound.silent();
final SharedPreferences shared = android.preference.PreferenceManager.getDefaultSharedPreferences(context);
int user;
if (shared.getString(AudioApplication.PREFERENCE_SOURCE, context.getString(R.string.source_mic)).equals(context.getString(R.string.source_raw))) {
if (Sound.isUnprocessedSupported(context))
user = MediaRecorder.AudioSource.UNPROCESSED;
else
user = MediaRecorder.AudioSource.VOICE_RECOGNITION;
} else {
user = MediaRecorder.AudioSource.MIC;
}
int[] ss = new int[]{
user,
MediaRecorder.AudioSource.MIC,
MediaRecorder.AudioSource.DEFAULT
};
if (shared.getBoolean(AudioApplication.PREFERENCE_FLY, false)) {
final OnFlyEncoding fly = new OnFlyEncoding(storage, targetUri, getInfo());
if (e == null) { // do not recreate encoder if on-fly mode enabled
e = new Encoder() {
@Override
public void encode(short[] buf, int pos, int len) {
fly.encode(buf, pos, len);
}
@Override
public void close() {
fly.close();
}
};
}
} else {
final RawSamples rs = new RawSamples(storage.getTempRecording());
rs.open(samplesTime * Sound.getChannels(context));
e = new Encoder() {
@Override
public void encode(short[] buf, int pos, int len) {
rs.write(buf, pos, len);
}
@Override
public void close() {
rs.close();
}
};
}
final AudioRecord recorder = Sound.createAudioRecorder(context, sampleRate, ss, 0);
final Thread old = thread;
final AtomicBoolean oldb = interrupt;
interrupt = new AtomicBoolean(false);
thread = new Thread("RecordingThread") {
@Override
public void run() {
if (old != null) {
oldb.set(true);
old.interrupt();
try {
old.join();
} catch (InterruptedException e) {
return;
}
}
PowerManager pm = (PowerManager) context.getSystemService(Context.POWER_SERVICE);
PowerManager.WakeLock wlcpu = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, BuildConfig.APPLICATION_ID + ":recordinglock");
wlcpu.acquire();
android.os.Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
boolean silenceDetected = false;
long silence = samplesTime; // last non silence frame
long start = System.currentTimeMillis(); // recording start time
long session = 0; // samples count from start of recording
try {
long last = System.currentTimeMillis();
recorder.startRecording();
int samplesTimeCount = 0;
final int samplesTimeUpdate = 1000 * sampleRate / 1000; // how many samples we need to update 'samples'. time clock. every 1000ms.
short[] buffer = null;
boolean stableRefresh = false;
while (!interrupt.get()) {
synchronized (bufferSizeLock) {
if (buffer == null || buffer.length != bufferSize)
buffer = new short[bufferSize];
}
int readSize = recorder.read(buffer, 0, buffer.length);
if (readSize < 0)
return;
long now = System.currentTimeMillis();
long diff = (now - last) * sampleRate / 1000;
last = now;
int samples = readSize / Sound.getChannels(context); // mono samples (for booth channels)
if (stableRefresh || diff >= samples) {
stableRefresh = true;
e.encode(buffer, 0, readSize);
short[] dbBuf;
int dbSize;
int readSizeUpdate;
if (dbBuffer != null) {
ShortBuffer bb = ShortBuffer.allocate(dbBuffer.position() + readSize);
dbBuffer.flip();
bb.put(dbBuffer);
bb.put(buffer, 0, readSize);
dbBuf = new short[bb.position()];
dbSize = dbBuf.length;
bb.flip();
bb.get(dbBuf, 0, dbBuf.length);
} else {
dbBuf = buffer;
dbSize = readSize;
}
readSizeUpdate = dbSize / samplesUpdateStereo * samplesUpdateStereo;
for (int i = 0; i < readSizeUpdate; i += samplesUpdateStereo) {
double a = RawSamples.getAmplitude(dbBuf, i, samplesUpdateStereo);
if (a != 0)
silence = samplesTime + (i + samplesUpdateStereo) / Sound.getChannels(context);
double dB = RawSamples.getDB(a);
Post(PINCH, dB);
}
int readSizeLen = dbSize - readSizeUpdate;
if (readSizeLen > 0) {
dbBuffer = ShortBuffer.allocate(readSizeLen);
dbBuffer.put(dbBuf, readSizeUpdate, readSizeLen);
} else {
dbBuffer = null;
}
samplesTime += samples;
samplesTimeCount += samples;
if (samplesTimeCount > samplesTimeUpdate) {
Post(UPDATESAMPLES, samplesTime);
samplesTimeCount -= samplesTimeUpdate;
}
session += samples;
if (samplesTime - silence > 2 * sampleRate) { // 2 second of mic muted
if (!silenceDetected) {
silenceDetected = true;
Post(MUTED, null);
}
} else {
if (silenceDetected) {
silenceDetected = false;
Post(UNMUTED, null);
}
}
diff = (now - start) * sampleRate / 1000; // number of samples we expect by this moment
if (diff - session > 2 * sampleRate) { // 2 second of silence / paused by os
Post(PAUSED, null);
session = diff; // reset
}
}
}
} catch (final RuntimeException e) {
Post(e);
} finally {
wlcpu.release();
// redraw view, we may add one last pich which is not been drawen because draw tread already interrupted.
// to prevent resume recording jump - draw last added pitch here.
Post(END, null);
if (recorder != null)
recorder.release();
if (!shared.getBoolean(AudioApplication.PREFERENCE_FLY, false)) { // keep encoder open if encoding on fly enabled
try {
if (e != null) {
e.close();
e = null;
}
} catch (RuntimeException e) {
Post(e);
}
}
}
}
};
thread.start();
}
public void stopRecording() {
if (thread != null) {
interrupt.set(true);
try {
thread.join();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
thread = null;
}
sound.unsilent();
}
public RawSamples.Info getInfo() {
return new RawSamples.Info(sampleRate, Sound.getChannels(context));
}
// Recalculate the read-buffer length dynamically; a larger buffer reduces
// recorder-thread wake-ups while the activity is in background or the
// screen is off.
public void updateBufferSize(boolean pause) {
    synchronized (bufferSizeLock) {
        final int update;
        if (pause) {
            // While paused we buffer ~1 second, truncated to a whole number of
            // pitch intervals (pitchTime ms each). The file may otherwise end on
            // a partial pitch block, which the pitch view cannot show; rather
            // than reconciling partial samples on resumeRecording (or reloading
            // the whole file), we simply cut them on the next resume.
            long ms = 1000L / pitchTime * pitchTime; // floor 1000 ms to a multiple of pitchTime
            update = (int) (ms * sampleRate / 1000.0);
        } else {
            update = this.samplesUpdate;
        }
        bufferSize = update * Sound.getChannels(context);
    }
}
// True when the buffer is sized for foreground updates (see updateBufferSize):
// the paused/background mode uses a larger buffer, so equality identifies
// the foreground configuration.
public boolean isForeground() {
    synchronized (bufferSizeLock) {
        final int foregroundSize = this.samplesUpdate * Sound.getChannels(context);
        return bufferSize == foregroundSize;
    }
}
// Convenience overload: publish an exception to all listeners as an ERROR message.
public void Post(Throwable e) {
    Post(ERROR, e);
}
// Fan a message out to every registered Handler. The handlers list is the
// lock object, so registration and delivery cannot interleave.
public void Post(int what, Object p) {
    synchronized (handlers) {
        for (Handler target : handlers) {
            target.obtainMessage(what, p).sendToTarget();
        }
    }
}
}

View file

@ -61,10 +61,10 @@ public class Recordings extends com.github.axet.audiolibrary.app.Recordings {
@Override
public void scan(List<Storage.Node> nn, boolean clean, Runnable done) {
EncodingService.EncodingStorage storage = new EncodingService.EncodingStorage(new Storage(context));
EncodingStorage storage = new EncodingStorage(context);
for (Storage.Node n : new ArrayList<>(nn)) {
for (File key : storage.keySet()) {
EncodingService.EncodingStorage.Info info = storage.get(key);
EncodingStorage.Info info = storage.get(key);
if (n.uri.equals(info.targetUri))
nn.remove(n);
}

View file

@ -6,12 +6,9 @@ import android.app.PendingIntent;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.IBinder;
import android.preference.PreferenceManager;
import android.support.annotation.Nullable;
import android.util.Log;
import android.os.Handler;
import android.os.Message;
import android.view.View;
import com.github.axet.androidlibrary.app.AlarmManager;
@ -23,23 +20,16 @@ import com.github.axet.audiolibrary.app.RawSamples;
import com.github.axet.audiolibrary.encoders.FileEncoder;
import com.github.axet.audiolibrary.encoders.FormatWAV;
import com.github.axet.audiolibrary.encoders.OnFlyEncoding;
import com.github.axet.audiolibrary.filters.AmplifierFilter;
import com.github.axet.audiolibrary.filters.SkipSilenceFilter;
import com.github.axet.audiolibrary.filters.VoiceFilter;
import com.github.axet.audiorecorder.R;
import com.github.axet.audiorecorder.activities.MainActivity;
import com.github.axet.audiorecorder.activities.RecordingActivity;
import com.github.axet.audiorecorder.app.AudioApplication;
import com.github.axet.audiorecorder.app.EncodingStorage;
import com.github.axet.audiorecorder.app.Storage;
import org.apache.commons.io.FileUtils;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.io.FilenameFilter;
import java.nio.charset.Charset;
import java.util.HashMap;
public class EncodingService extends PersistentService {
public static final String TAG = EncodingService.class.getSimpleName();
@ -48,25 +38,39 @@ public class EncodingService extends PersistentService {
public static String SHOW_ACTIVITY = EncodingService.class.getCanonicalName() + ".SHOW_ACTIVITY";
public static String SAVE_AS_WAV = EncodingService.class.getCanonicalName() + ".SAVE_AS_WAV";
public static String UPDATE_ENCODING = EncodingService.class.getCanonicalName() + ".UPDATE_ENCODING";
public static String DONE_ENCODING = EncodingService.class.getCanonicalName() + ".DONE_ENCODING";
public static String START_ENCODING = EncodingService.class.getCanonicalName() + ".START_ENCODING";
public static String ERROR = EncodingService.class.getCanonicalName() + ".ERROR";
public static String JSON_EXT = "json";
static {
OptimizationPreferenceCompat.REFRESH = AlarmManager.MIN1;
}
Storage storage; // for storage path
EncodingStorage encodings;
FileEncoder encoder;
// Callback registered with the shared EncodingStorage.handlers list; receives
// progress/lifecycle messages from the background encoder.
Handler handler = new Handler() {
    long last = 0; // wall-clock ms of the last notification-icon refresh
    @Override
    public void handleMessage(Message msg) {
        if (msg.what == EncodingStorage.UPDATE) {
            long now = System.currentTimeMillis();
            if (last + 1000 < now) { // throttle icon updates to at most once per second
                last = now;
                optimization.icon.updateIcon((Intent) msg.obj);
            }
        }
        if (msg.what == EncodingStorage.DONE) {
            // One file finished; restart the queue to pick up the next pending one.
            EncodingStorage encodings = ((AudioApplication) getApplication()).encodings;
            encodings.restart();
        }
        if (msg.what == EncodingStorage.ERROR) {
            stopSelf(); // encoding failed; shut the service down
        }
    }
};
public static void startIfPending(Context context) { // if encoding pending
Storage storage = new Storage(context);
EncodingStorage enc = new EncodingStorage(storage);
if (!enc.isEmpty()) {
EncodingStorage encodings = ((AudioApplication) context.getApplicationContext()).encodings;
encodings.load();
if (!encodings.isEmpty()) {
start(context);
return;
}
@ -92,22 +96,10 @@ public class EncodingService extends PersistentService {
stop(context, new Intent(context, EncodingService.class));
}
// Broadcasts an encoding failure (input file, raw-format json, and the cause)
// so interested receivers can surface it. A JSON serialization failure here is
// a programming error and is rethrown unchecked.
public void Error(File in, RawSamples.Info info, Throwable e) {
    try {
        sendBroadcast(new Intent(ERROR)
                .putExtra("in", in)
                .putExtra("info", info.save().toString())
                .putExtra("e", e)
        );
    } catch (JSONException e1) {
        throw new RuntimeException(e1);
    }
}
public static void startEncoding(Context context, File in, Uri targetUri, RawSamples.Info info) {
try {
EncodingStorage storage = new EncodingStorage(new Storage(context));
in = storage.save(in, targetUri, info);
EncodingStorage encodings = ((AudioApplication) context.getApplicationContext()).encodings;
in = encodings.save(in, targetUri, info);
start(context, new Intent(context, EncodingService.class).setAction(START_ENCODING)
.putExtra("in", in)
.putExtra("targetUri", targetUri)
@ -118,91 +110,6 @@ public class EncodingService extends PersistentService {
}
}
// Map of raw temp recordings awaiting encoding -> their target/format metadata.
// Each pending file has a ".json" sidecar persisting that metadata, so the
// queue survives process restarts.
public static class EncodingStorage extends HashMap<File, EncodingStorage.Info> {
    public Storage storage;
    // Sidecar metadata path for a raw recording: same directory and base name,
    // with the JSON_EXT extension.
    public static File jsonFile(File f) {
        return new File(f.getParentFile(), Storage.getNameNoExt(f) + "." + JSON_EXT);
    }
    // Per-recording metadata: destination Uri of the encoded output plus the
    // raw-sample format needed to decode the temp file.
    public static class Info {
        public Uri targetUri;
        public RawSamples.Info info;
        public Info() {
        }
        public Info(Uri t, RawSamples.Info i) {
            this.targetUri = t;
            this.info = i;
        }
        public Info(String json) throws JSONException {
            load(new JSONObject(json));
        }
        public Info(JSONObject json) throws JSONException {
            load(json);
        }
        // Serialize to JSON (inverse of load).
        public JSONObject save() throws JSONException {
            JSONObject json = new JSONObject();
            json.put("targetUri", targetUri.toString());
            json.put("info", info.save());
            return json;
        }
        public void load(JSONObject json) throws JSONException {
            targetUri = Uri.parse(json.getString("targetUri"));
            info = new RawSamples.Info(json.getJSONObject("info"));
        }
    }
    public EncodingStorage(Storage s) {
        storage = s;
        load();
    }
    // Rebuilds the map by scanning the temp dir for pending encodings (files
    // matching the Storage.TMP_ENC name pattern) and reading their json
    // sidecars. Unreadable sidecars are logged and skipped, not fatal.
    public void load() {
        clear();
        File storage = this.storage.getTempRecording().getParentFile();
        File[] ff = storage.listFiles(new FilenameFilter() {
            String start = Storage.getNameNoExt(Storage.TMP_ENC);
            String ext = Storage.getExt(Storage.TMP_ENC);
            @Override
            public boolean accept(File dir, String name) {
                return name.startsWith(start) && name.endsWith("." + ext);
            }
        });
        if (ff == null) // directory missing or unreadable
            return;
        for (File f : ff) {
            File j = jsonFile(f);
            try {
                put(f, new Info(new JSONObject(FileUtils.readFileToString(j, Charset.defaultCharset()))));
            } catch (Exception e) {
                Log.d(TAG, "unable to read json", e);
            }
        }
    }
    // Moves 'in' into the temp-encoding area under the next free name, writes
    // its json sidecar, and returns the new location. IO/JSON failures are
    // rethrown unchecked — the recording cannot be queued without metadata.
    public File save(File in, Uri targetUri, RawSamples.Info info) {
        File to = storage.getTempEncoding();
        to = Storage.getNextFile(to);
        to = Storage.move(in, to);
        try {
            File j = jsonFile(to);
            Info rec = new Info(targetUri, info);
            JSONObject json = rec.save();
            FileUtils.writeStringToFile(j, json.toString(), Charset.defaultCharset());
            return to;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
// Default constructor required by the Android framework to instantiate the service.
public EncodingService() {
}
@ -214,7 +121,6 @@ public class EncodingService extends PersistentService {
@Override
public void onCreateOptimization() {
storage = new Storage(this);
encodings = new EncodingStorage(storage);
optimization = new OptimizationPreferenceCompat.ServiceReceiver(this, NOTIFICATION_RECORDING_ICON, null, AudioApplication.PREFERENCE_NEXT) {
Intent notificationIntent;
@ -280,11 +186,25 @@ public class EncodingService extends PersistentService {
}
};
optimization.create();
EncodingStorage encodings = ((AudioApplication) getApplication()).encodings;
synchronized (encodings.handlers) {
encodings.handlers.add(handler);
}
}
@Override
public void onDestroy() {
    super.onDestroy();
    // Deregister from encoding progress messages; the handlers list doubles
    // as its own lock (matching the add in onCreateOptimization).
    EncodingStorage encodings = ((AudioApplication) getApplication()).encodings;
    synchronized (encodings.handlers) {
        encodings.handlers.remove(handler);
    }
}
@Override
public void onStartCommand(Intent intent) {
String a = intent.getAction();
final EncodingStorage encodings = ((AudioApplication) getApplication()).encodings;
if (a == null) {
optimization.icon.updateIcon(intent);
} else if (a.equals(SHOW_ACTIVITY)) {
@ -299,18 +219,8 @@ public class EncodingService extends PersistentService {
File out = (File) intent.getSerializableExtra("out"); // dir
out = storage.getNewFile(out, FormatWAV.EXT);
RawSamples.Info info = new RawSamples.Info(intent.getStringExtra("info"));
if (encoder == null) {
OnFlyEncoding fly = new OnFlyEncoding(storage, out, info);
encoder = new FileEncoder(this, in, fly);
encoding(encoder, fly, info, new Runnable() {
@Override
public void run() {
encoder.close();
encoder = null;
startEncoding();
}
});
}
if (encodings.encoder == null)
encodings.saveAsWAV(in, out, info);
} catch (JSONException e) {
throw new RuntimeException(e);
}
@ -319,113 +229,12 @@ public class EncodingService extends PersistentService {
File in = (File) intent.getSerializableExtra("in");
Uri targetUri = intent.getParcelableExtra("targetUri");
RawSamples.Info info = new RawSamples.Info(intent.getStringExtra("info"));
if (encoder == null) {
OnFlyEncoding fly = new OnFlyEncoding(storage, targetUri, info);
encoder = new FileEncoder(this, in, fly);
encodingFilters(encoder, fly, info, new Runnable() {
@Override
public void run() {
encoder.close();
encoder = null;
startEncoding();
}
});
}
if (encodings.encoder == null)
encodings.encoding(in, targetUri, info);
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
startEncoding();
}
// Drains the pending-encoding queue one file at a time: takes the first
// pending file, encodes it, then recurses from the completion callback;
// stops the service when the queue is empty. No-op while an encode runs.
public void startEncoding() {
    if (encoder != null) // an encode is already in flight
        return;
    encodings.load(); // refresh the pending list from disk
    for (File in : encodings.keySet()) {
        EncodingStorage.Info info = encodings.get(in);
        final OnFlyEncoding fly = new OnFlyEncoding(this.storage, info.targetUri, info.info);
        encoder = new FileEncoder(this, in, fly);
        encodingFilters(encoder, fly, info.info, new Runnable() {
            @Override
            public void run() {
                encoder.close();
                encoder = null;
                startEncoding(); // move on to the next pending file
            }
        });
        return; // only one file at a time
    }
    stopSelf(); // nothing left to encode
}
// Applies the user's audio-filter preferences (voice filter, volume
// amplification, silence skipping) to the encoder, then starts the encode
// via encoding(). 'done' runs after a successful encode.
void encodingFilters(final FileEncoder encoder, final OnFlyEncoding fly, final RawSamples.Info info, final Runnable done) {
    SharedPreferences shared = PreferenceManager.getDefaultSharedPreferences(this);
    if (shared.getBoolean(AudioApplication.PREFERENCE_VOICE, false))
        encoder.filters.add(new VoiceFilter(info));
    float amp = shared.getFloat(AudioApplication.PREFERENCE_VOLUME, 1);
    if (amp != 1) // 1.0 means no amplification requested
        encoder.filters.add(new AmplifierFilter(amp));
    if (shared.getBoolean(AudioApplication.PREFERENCE_SKIP, false))
        encoder.filters.add(new SkipSilenceFilter(info));
    encoding(encoder, fly, info, done);
}
// Runs the encoder with three callbacks: progress (broadcast + icon update,
// throttled to once per second), success (delete raw file + sidecar,
// broadcast DONE, run 'done'), and error (delete partial target, broadcast
// the error, stop the service).
void encoding(final FileEncoder encoder, final OnFlyEncoding fly, final RawSamples.Info info, final Runnable done) {
    encoder.run(new Runnable() {
        long last = 0; // wall-clock ms of the last progress broadcast
        @Override
        public void run() { // progress
            try {
                long cur = encoder.getCurrent();
                long total = encoder.getTotal();
                long now = System.currentTimeMillis();
                Intent intent = new Intent(UPDATE_ENCODING)
                        .putExtra("cur", cur)
                        .putExtra("total", total)
                        .putExtra("info", info.save().toString())
                        .putExtra("targetUri", fly.targetUri)
                        .putExtra("targetFile", Storage.getName(EncodingService.this, fly.targetUri));
                if (last + 1000 < now) { // throttle to one update per second
                    last = now;
                    sendBroadcast(intent);
                    optimization.icon.updateIcon(intent);
                }
            } catch (JSONException e) {
                throw new RuntimeException(e);
            }
        }
    }, new Runnable() {
        @Override
        public void run() { // success
            Storage.delete(encoder.in); // delete raw recording
            Storage.delete(EncodingStorage.jsonFile(encoder.in)); // delete json file
            sendBroadcast(new Intent(DONE_ENCODING)
                    .putExtra("targetUri", fly.targetUri)
            );
            done.run();
        }
    }, new Runnable() {
        @Override
        public void run() { // or error
            Storage.delete(EncodingService.this, fly.targetUri); // fly has fd, delete target manually
            Error(encoder.in, info, encoder.getException());
            stopSelf();
        }
    });
}
@Nullable
@Override
public IBinder onBind(Intent intent) {
    return null; // started service only; binding is not supported
}
@Override
public void onDestroy() {
    super.onDestroy();
    // NOTE(review): kicks the encoding queue as the service dies — presumably
    // so pending work resumes under EncodingStorage's own thread; confirm
    // against EncodingStorage.startEncoding semantics.
    encodings.startEncoding();
}
}