├── .gitignore
├── LICENSE
├── README.md
├── app
├── .gitignore
├── build.gradle
├── proguard-rules.pro
└── src
│ ├── androidTest
│ └── java
│ │ └── amirz
│ │ └── dngprocessor
│ │ └── ExampleInstrumentedTest.java
│ ├── main
│ ├── AndroidManifest.xml
│ ├── ic_launcher-web.png
│ ├── java
│ │ └── amirz
│ │ │ ├── dngprocessor
│ │ │ ├── MainActivity.java
│ │ │ ├── Preferences.java
│ │ │ ├── colorspace
│ │ │ │ ├── ColorspaceConstants.java
│ │ │ │ └── ColorspaceConverter.java
│ │ │ ├── device
│ │ │ │ ├── DeviceMap.java
│ │ │ │ ├── Generic.java
│ │ │ │ ├── Mi9.java
│ │ │ │ ├── MotoG6.java
│ │ │ │ ├── OnePlus.java
│ │ │ │ ├── OnePlus3.java
│ │ │ │ ├── OnePlus5.java
│ │ │ │ ├── OnePlus6.java
│ │ │ │ ├── OnePlus7.java
│ │ │ │ ├── Redmi.java
│ │ │ │ └── Xiaomi.java
│ │ │ ├── gl
│ │ │ │ ├── GLCore.java
│ │ │ │ ├── GLPrograms.java
│ │ │ │ ├── GLResource.java
│ │ │ │ ├── SquareModel.java
│ │ │ │ ├── Texture.java
│ │ │ │ └── TexturePool.java
│ │ │ ├── math
│ │ │ │ ├── BlockDivider.java
│ │ │ │ ├── Convolve.java
│ │ │ │ └── Histogram.java
│ │ │ ├── params
│ │ │ │ ├── ProcessParams.java
│ │ │ │ └── SensorParams.java
│ │ │ ├── parser
│ │ │ │ ├── ByteReader.java
│ │ │ │ ├── CFAPattern.java
│ │ │ │ ├── DngParser.java
│ │ │ │ ├── OpParser.java
│ │ │ │ ├── TIFF.java
│ │ │ │ ├── TIFFTag.java
│ │ │ │ └── TagParser.java
│ │ │ ├── pipeline
│ │ │ │ ├── GLBlockProcessing.java
│ │ │ │ ├── Stage.java
│ │ │ │ ├── StagePipeline.java
│ │ │ │ ├── convert
│ │ │ │ │ ├── EdgeMirror.java
│ │ │ │ │ ├── GreenDemosaic.java
│ │ │ │ │ ├── PreProcess.java
│ │ │ │ │ └── ToIntermediate.java
│ │ │ │ ├── exposefuse
│ │ │ │ │ ├── DoubleExpose.java
│ │ │ │ │ ├── FuseUtils.java
│ │ │ │ │ ├── Laplace.java
│ │ │ │ │ └── Merge.java
│ │ │ │ ├── intermediate
│ │ │ │ │ ├── Analysis.java
│ │ │ │ │ ├── BilateralFilter.java
│ │ │ │ │ └── MergeDetail.java
│ │ │ │ ├── noisereduce
│ │ │ │ │ ├── Decompose.java
│ │ │ │ │ ├── NoiseMap.java
│ │ │ │ │ └── NoiseReduce.java
│ │ │ │ └── post
│ │ │ │ │ ├── BlurLCE.java
│ │ │ │ │ └── ToneMap.java
│ │ │ ├── scheduler
│ │ │ │ ├── BootReceiver.java
│ │ │ │ ├── DngParseService.java
│ │ │ │ └── DngScanJob.java
│ │ │ └── util
│ │ │ │ ├── Constants.java
│ │ │ │ ├── NotifHandler.java
│ │ │ │ ├── Path.java
│ │ │ │ ├── ShaderLoader.java
│ │ │ │ └── Utilities.java
│ │ │ └── library
│ │ │ └── settings
│ │ │ ├── GlobalPreferences.java
│ │ │ └── TextPreference.java
│ └── res
│ │ ├── drawable
│ │ └── ic_notif.xml
│ │ ├── mipmap-anydpi-v26
│ │ └── ic_launcher.xml
│ │ ├── mipmap-hdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_foreground.png
│ │ ├── mipmap-mdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_foreground.png
│ │ ├── mipmap-xhdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_foreground.png
│ │ ├── mipmap-xxhdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_foreground.png
│ │ ├── mipmap-xxxhdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_foreground.png
│ │ ├── raw
│ │ ├── import_gamma.glsl
│ │ ├── import_gaussian.glsl
│ │ ├── import_load3x3.glsl
│ │ ├── import_load3x3v2.glsl
│ │ ├── import_load3x3v3.glsl
│ │ ├── import_load5x5v3.glsl
│ │ ├── import_sigmoid.glsl
│ │ ├── import_xyy_to_xyz.glsl
│ │ ├── import_xyz_to_xyy.glsl
│ │ ├── passthrough_vs.glsl
│ │ ├── stage1_1_fs.glsl
│ │ ├── stage1_2_fs.glsl
│ │ ├── stage1_3_fs.glsl
│ │ ├── stage1_4_edge_mirror_fs.glsl
│ │ ├── stage2_0_blur_3ch_fs.glsl
│ │ ├── stage2_1_bilateral_ch.glsl
│ │ ├── stage2_1_noise_level_blur_fs.glsl
│ │ ├── stage2_1_noise_level_fs.glsl
│ │ ├── stage2_2_analysis_fs.glsl
│ │ ├── stage2_3_bilateral.glsl
│ │ ├── stage2_3_median.glsl
│ │ ├── stage2_4_merge_detail.glsl
│ │ ├── stage3_1_noise_reduce_chroma_fs.glsl
│ │ ├── stage3_1_noise_reduce_fs.glsl
│ │ ├── stage3_1_noise_reduce_median_fs.glsl
│ │ ├── stage3_1_noise_reduce_remove_noise_fs.glsl
│ │ ├── stage3_2_blur_fs.glsl
│ │ ├── stage3_3_tonemap_fs.glsl
│ │ ├── stage4_0_blur_1ch_fs.glsl
│ │ ├── stage4_1_doubleexpose.glsl
│ │ ├── stage4_2_downsample.glsl
│ │ ├── stage4_3_upsample.glsl
│ │ ├── stage4_4_difference.glsl
│ │ ├── stage4_5_merge.glsl
│ │ ├── stage4_6_xyz_to_xyy.glsl
│ │ ├── stage4_7_nr_intermediate.glsl
│ │ ├── stage4_8_nr_zero.glsl
│ │ └── stage4_9_combine_z.glsl
│ │ ├── values
│ │ ├── colors.xml
│ │ ├── config.xml
│ │ ├── strings.xml
│ │ ├── strings_prefs.xml
│ │ └── styles.xml
│ │ └── xml
│ │ └── preferences.xml
│ └── test
│ └── java
│ └── amirz
│ └── dngprocessor
│ └── ExampleUnitTest.java
├── build.gradle
├── gradle.properties
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | # Project files and paths.
2 | .classpath
3 | .project
4 | .project.properties
5 | *.iml
6 | **/*.iml
7 | .settings/
8 | bin/
9 | libs/
10 | gen/
11 | .idea/
12 | .gradle/
13 | gradle/
14 | build/
15 | gradlew*
16 | local.properties
17 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # DNG Processor
2 |
3 | ## How does this app work?
4 | It waits for new RAW images captured using a supported camera app, and then processes them in the background.
5 | It does this by using the power of your phone's graphics unit for up to a few seconds.
6 | The results have more details, higher dynamic range, and can improve over time when the application gets updated.
7 |
8 | ## Why not Google Camera?
9 | There are two reasons why using this app might be preferable over using Google Camera.
10 | The first reason is that the Google Camera ports all have their own bugs and kinks, while the camera app included on the device usually works well in any situation, albeit with lesser quality.
11 | If something does go wrong in DNG Processor, the RAW file remains, so your shot is never lost.
12 | The second reason is that this app is fully open source and will get better over time through a community effort.
13 | People with image processing experience can take a look at the code and contribute, since there’s no obfuscation.
14 |
15 | ## Samples
16 | https://drive.google.com/open?id=1vCdjNCKu2GnEVwHnuQ3LkQkPnNZNQSj0
17 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
apply plugin: 'com.android.application'

android {
    compileSdkVersion 28
    defaultConfig {
        applicationId "amirz.dngprocessor"
        minSdkVersion 24
        targetSdkVersion 28
        versionCode 5
        // NOTE(review): versionName is assigned the integer versionCode, but the
        // Android Gradle plugin documents versionName as a String — confirm this
        // resolves to "5"; consider versionName = versionCode.toString().
        versionName = versionCode
        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
    compileOptions {
        sourceCompatibility = '1.8'
        targetCompatibility = '1.8'
    }
    // Name the output APK after the variant, e.g. DNGProcessor-release.apk.
    applicationVariants.all {
        variant -> variant.outputs.all {
            outputFileName = "DNGProcessor-${variant.name}.apk"
        }
    }
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'com.android.support:exifinterface:28.0.0'
    testImplementation 'junit:junit:4.12'
    androidTestImplementation 'com.android.support.test:runner:1.0.2'
    androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
}
37 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/app/src/androidTest/java/amirz/dngprocessor/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor;
2 |
3 | import android.content.Context;
4 | import android.support.test.InstrumentationRegistry;
5 | import android.support.test.runner.AndroidJUnit4;
6 |
7 | import org.junit.Test;
8 | import org.junit.runner.RunWith;
9 |
10 | import static org.junit.Assert.*;
11 |
12 | /**
13 | * Instrumented test, which will execute on an Android device.
14 | *
15 | * @see Testing documentation
16 | */
17 | @RunWith(AndroidJUnit4.class)
18 | public class ExampleInstrumentedTest {
19 | @Test
20 | public void useAppContext() {
21 | // Context of the app under test.
22 | Context appContext = InstrumentationRegistry.getTargetContext();
23 |
24 | assertEquals("amirz.dngprocessor", appContext.getPackageName());
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
33 |
34 |
35 |
36 |
37 |
38 |
41 |
42 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/app/src/main/ic_launcher-web.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/amirzaidi/DNGProcessor/29a188be16b1e7ba48fd953a73f0f3a03c681a77/app/src/main/ic_launcher-web.png
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/MainActivity.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor;
2 |
3 | import android.Manifest;
4 | import android.app.Activity;
5 | import android.content.ClipData;
6 | import android.content.Intent;
7 | import android.net.Uri;
8 | import android.os.Bundle;
9 | import android.preference.Preference;
10 |
11 | import amirz.dngprocessor.scheduler.DngParseService;
12 | import amirz.dngprocessor.scheduler.DngScanJob;
13 | import amirz.dngprocessor.util.NotifHandler;
14 | import amirz.dngprocessor.util.Path;
15 |
16 | import static android.content.pm.PackageManager.PERMISSION_GRANTED;
17 |
18 | public class MainActivity extends Activity {
19 | private static final String TAG = "MainActivity";
20 |
21 | private static final int REQUEST_PERMISSIONS = 1;
22 | private static final int REQUEST_IMAGE = 2;
23 |
24 | @Override
25 | protected void onCreate(Bundle savedInstanceState) {
26 | super.onCreate(savedInstanceState);
27 |
28 | if (savedInstanceState == null) {
29 | NotifHandler.createChannel(this);
30 | tryLoad();
31 | }
32 | }
33 |
34 | private boolean hasPermissions() {
35 | return checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE)
36 | == PERMISSION_GRANTED;
37 | }
38 |
39 | private void tryLoad() {
40 | if (hasPermissions()) {
41 | DngScanJob.scheduleJob(this);
42 | getFragmentManager().beginTransaction()
43 | .replace(android.R.id.content, new Preferences.Fragment())
44 | .commit();
45 | } else {
46 | requestPermissions(new String[] {
47 | Manifest.permission.WRITE_EXTERNAL_STORAGE
48 | }, REQUEST_PERMISSIONS);
49 | }
50 | }
51 |
52 | @Override
53 | public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
54 | super.onRequestPermissionsResult(requestCode, permissions, grantResults);
55 |
56 | switch (requestCode) {
57 | case REQUEST_PERMISSIONS:
58 | if (grantResults[0] == PERMISSION_GRANTED) {
59 | tryLoad();
60 | }
61 | break;
62 | }
63 | }
64 |
65 | public boolean requestImage(Preference preference) {
66 | Intent picker = new Intent(Intent.ACTION_OPEN_DOCUMENT);
67 | picker.addFlags(Intent.FLAG_GRANT_PERSISTABLE_URI_PERMISSION);
68 | picker.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
69 | picker.setType(Path.MIME_RAW);
70 | picker.putExtra(Intent.EXTRA_ALLOW_MULTIPLE, true);
71 | startActivityForResult(picker, REQUEST_IMAGE);
72 |
73 | return false;
74 | }
75 |
76 | @Override
77 | protected void onActivityResult(int requestCode, int resultCode, Intent data) {
78 | if (requestCode == REQUEST_IMAGE && resultCode == RESULT_OK) {
79 | int flags = data.getFlags();
80 | ClipData cd = data.getClipData();
81 | if (cd == null) {
82 | process(data.getData(), flags);
83 | } else {
84 | for (int i = 0; i < cd.getItemCount(); i++) {
85 | process(cd.getItemAt(i).getUri(), flags);
86 | }
87 | }
88 | }
89 | }
90 |
91 | @Override
92 | public void recreate() {
93 | finish();
94 | startActivity(getIntent());
95 | }
96 |
97 | private void process(Uri uri, int flags) {
98 | getContentResolver().takePersistableUriPermission(uri,
99 | Intent.FLAG_GRANT_READ_URI_PERMISSION);
100 | DngParseService.runForUri(this, uri);
101 | }
102 | }
103 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/colorspace/ColorspaceConstants.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.colorspace;
2 |
3 | import android.hardware.camera2.CameraMetadata;
4 | import android.util.SparseIntArray;
5 |
/**
 * Static color conversion matrices (row-major 3x3) and reference-illuminant
 * data used by the colorspace conversion code.
 */
public class ColorspaceConstants {
    /**
     * Matrix to convert from CIE XYZ colorspace to sRGB, Bradford-adapted to D65.
     */
    public static final float[] sXYZtoSRGB = new float[]{
            3.1338561f, -1.6168667f, -0.4906146f,
            -0.9787684f, 1.9161415f, 0.0334540f,
            0.0719453f, -0.2289914f, 1.4052427f
    };

    /**
     * Matrix to convert from the ProPhoto RGB colorspace to CIE XYZ colorspace.
     */
    public static final float[] sProPhotoToXYZ = new float[]{
            0.797779f, 0.135213f, 0.031303f,
            0.288000f, 0.711900f, 0.000100f,
            0.000000f, 0.000000f, 0.825105f
    };

    /**
     * Matrix to convert from CIE XYZ colorspace to ProPhoto RGB colorspace.
     * Tone-mapping is done in that colorspace before converting back.
     */
    public static final float[] sXYZtoProPhoto = new float[]{
            1.345753f, -0.255603f, -0.051025f,
            -0.544426f, 1.508096f, 0.020472f,
            0.000000f, 0.000000f, 1.211968f
    };

    /*
     * Coefficients for a 3rd order polynomial, ordered from highest to lowest power. This
     * polynomial approximates the default tonemapping curve used for ACR3.
     *
    public static final float[] DEFAULT_ACR3_TONEMAP_CURVE_COEFFS = new float[]{
            -0.7836f, 0.8469f, 0.943f, 0.0209f
    };
    */

    /**
     * Coefficients for a 3rd order polynomial, ordered from highest to lowest power.
     * Adapted to transform from [0,1] to [0,1]
     */
    public static final float[] CUSTOM_ACR3_TONEMAP_CURVE_COEFFS = new float[] {
            // Divided by 1.0063 (the sum of the first three ACR3 coefficients)
            // so the curve maps 1.0 to exactly 1.0; the constant term is dropped.
            -0.78360f / 1.0063f, 0.84690f / 1.0063f, 0.9430f / 1.0063f, 0f
            //-1.087f, 1.643f, 0.443f, 0f
    };

    /**
     * The D50 whitepoint coordinates in CIE XYZ colorspace.
     */
    public static final float[] D50_XYZ = new float[]{0.9642f, 1, 0.8249f};

    /**
     * An array containing the color temperatures (in Kelvin) for standard reference
     * illuminants, keyed by CameraMetadata.SENSOR_REFERENCE_ILLUMINANT1_* constants.
     */
    public static final SparseIntArray sStandardIlluminants = new SparseIntArray();

    // Sentinel for an illuminant id with no known temperature — presumably used
    // by lookups on sStandardIlluminants; confirm at the call sites.
    public static final int NO_ILLUMINANT = -1;

    static {
        sStandardIlluminants.append(CameraMetadata.SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, 6504);
        sStandardIlluminants.append(CameraMetadata.SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, 2856);
        sStandardIlluminants.append(CameraMetadata.SENSOR_REFERENCE_ILLUMINANT1_D65, 6504);
        sStandardIlluminants.append(CameraMetadata.SENSOR_REFERENCE_ILLUMINANT1_D50, 5003);
        sStandardIlluminants.append(CameraMetadata.SENSOR_REFERENCE_ILLUMINANT1_D55, 5503);
        sStandardIlluminants.append(CameraMetadata.SENSOR_REFERENCE_ILLUMINANT1_D75, 7504);
        sStandardIlluminants.append(CameraMetadata.SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, 2856);
        sStandardIlluminants.append(CameraMetadata.SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B, 4874);
        sStandardIlluminants.append(CameraMetadata.SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C, 6774);
        sStandardIlluminants.append(
                CameraMetadata.SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, 6430);
        sStandardIlluminants.append(
                CameraMetadata.SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, 4230);
        sStandardIlluminants.append(
                CameraMetadata.SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, 3450);
        // TODO: Add the rest of the illuminants included in the LightSource EXIF tag.

        /*
        public static final int SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT = 2;
        public static final int SENSOR_REFERENCE_ILLUMINANT1_FLASH = 4;
        public static final int SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER = 9;
        public static final int SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER = 10;
        public static final int SENSOR_REFERENCE_ILLUMINANT1_SHADE = 11;
        public static final int SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT = 13;
        public static final int SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN = 24;
        */
    }
}
94 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/device/DeviceMap.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.device;
2 |
3 | import android.util.Log;
4 | import android.util.SparseArray;
5 |
6 | import java.util.ArrayList;
7 | import java.util.List;
8 |
9 | import amirz.dngprocessor.params.ProcessParams;
10 | import amirz.dngprocessor.params.SensorParams;
11 | import amirz.dngprocessor.parser.TIFFTag;
12 |
13 | public class DeviceMap {
14 | private static final String TAG = "DeviceMap";
15 |
16 | public interface Device {
17 | boolean isModel(String model);
18 |
19 | void sensorCorrection(SparseArray tags, SensorParams sensor);
20 |
21 | void processCorrection(SparseArray tags, ProcessParams process);
22 | }
23 |
24 | private static final List sDevices = new ArrayList<>();
25 | static {
26 | sDevices.add(new OnePlus7());
27 | sDevices.add(new OnePlus6());
28 | sDevices.add(new OnePlus5());
29 | sDevices.add(new OnePlus3());
30 | sDevices.add(new OnePlus());
31 |
32 | sDevices.add(new MotoG6());
33 |
34 | sDevices.add(new Mi9());
35 | sDevices.add(new Redmi());
36 |
37 | sDevices.add(new Generic());
38 | }
39 |
40 | public static Device get(String model) {
41 | for (Device device : sDevices) {
42 | if (device.isModel(model)) {
43 | Log.d(TAG, "Device " + model + " found: " + device.getClass().getSimpleName());
44 | return device;
45 | }
46 | }
47 | throw new RuntimeException("No device found");
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/device/Generic.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.device;
2 |
3 | import android.util.SparseArray;
4 |
5 | import amirz.dngprocessor.params.ProcessParams;
6 | import amirz.dngprocessor.params.SensorParams;
7 | import amirz.dngprocessor.parser.TIFFTag;
8 |
9 | class Generic implements DeviceMap.Device {
10 | @Override
11 | public boolean isModel(String model) {
12 | return true;
13 | }
14 |
15 | @Override
16 | public void sensorCorrection(SparseArray tags, SensorParams sensor) {
17 | }
18 |
19 | @Override
20 | public void processCorrection(SparseArray tags, ProcessParams process) {
21 | saturationCorrection(process.saturationMap);
22 | }
23 |
24 | void saturationCorrection(float[] saturationMap) {
25 | float genericMult = 1.1f;
26 | saturationMap[0] *= genericMult;
27 | saturationMap[1] *= genericMult;
28 | saturationMap[2] *= genericMult;
29 | saturationMap[3] *= genericMult;
30 | saturationMap[4] *= genericMult;
31 | saturationMap[5] *= genericMult;
32 | saturationMap[6] *= genericMult;
33 | saturationMap[7] *= genericMult;
34 | }
35 |
36 | static float[] d2f(double... doubles) {
37 | float[] floats = new float[doubles.length * 4];
38 | for (int i = 0; i < doubles.length; i++) {
39 | for (int j = 0; j < 4; j++) {
40 | floats[i * 4 + j] = (float) doubles[i];
41 | }
42 | }
43 | return floats;
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/device/MotoG6.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.device;
2 |
3 | import android.util.SparseArray;
4 |
5 | import amirz.dngprocessor.params.ProcessParams;
6 | import amirz.dngprocessor.params.SensorParams;
7 | import amirz.dngprocessor.parser.TIFF;
8 | import amirz.dngprocessor.parser.TIFFTag;
9 |
10 | import static amirz.dngprocessor.util.Constants.PLUS;
11 |
12 | public class MotoG6 extends Generic {
13 | @Override
14 | public boolean isModel(String model) {
15 | return model.startsWith("moto g(6)");
16 | }
17 |
18 | @Override
19 | public void sensorCorrection(SparseArray tags, SensorParams sensor) {
20 | super.sensorCorrection(tags, sensor);
21 |
22 | if (sensor.gainMap == null) {
23 | // Extracted from a Camera2 photo
24 | sensor.gainMap = d2f(
25 | 2.7091816625, 2.454971925, 2.1506592625, 1.9805712624999998, 1.82734355, 1.72615125, 1.6894398, 1.72301305, 1.8291785, 1.9754675750000001, 2.156523375, 2.4733261499999997, 2.7608015874999996,
26 | 2.5475299625, 2.1676563499999997, 1.9369760500000002, 1.7285031499999999, 1.559904775, 1.4656663250000002, 1.436121125, 1.4639520125, 1.5537302, 1.7218862874999998, 1.9320019000000002, 2.168759525, 2.5633632499999996,
27 | 2.3285123499999996, 2.018356525, 1.7648059625, 1.5328743999999999, 1.3908445375, 1.3060006875, 1.27425725, 1.3030814375, 1.3844899125, 1.52193545, 1.7548983875000002, 2.0037337375, 2.3349842,
28 | 2.204470975, 1.9172350624999999, 1.6325386375000002, 1.4229334999999999, 1.2803529625, 1.1685853000000002, 1.125249575, 1.1605595750000002, 1.2682481625, 1.4085095250000002, 1.6162260000000002, 1.89631545, 2.1930648625,
29 | 2.156538975, 1.8597040374999998, 1.56833175, 1.3714116625, 1.2100522999999999, 1.087551225, 1.0379170875, 1.0745168875, 1.194615475, 1.354820975, 1.5485908625000002, 1.8332039375000002, 2.1250934249999998,
30 | 2.1508892875, 1.859270725, 1.5657657999999999, 1.3697351125000001, 1.2089787125, 1.0862922625, 1.0381207, 1.07642125, 1.195305925, 1.356258375, 1.5466158624999997, 1.82905485, 2.1194305249999994,
31 | 2.2050355625, 1.91479135, 1.6255247500000003, 1.4185947875, 1.277153025, 1.165422425, 1.1231096999999999, 1.1591060375, 1.267472675, 1.4074006625, 1.6074348999999999, 1.8833028125, 2.1737473625,
32 | 2.3236944125, 2.011578975, 1.7546635, 1.5243631750000002, 1.3870425374999997, 1.3024955249999999, 1.2688906625, 1.2979457, 1.3784740875, 1.5121629250000002, 1.7376604, 1.9855739374999999, 2.3107723124999997,
33 | 2.5196864875000005, 2.1556872, 1.9250188625, 1.7114033375, 1.5443060625, 1.4549411125000002, 1.4265024125, 1.4510423375, 1.53533655, 1.6979879125, 1.9084106624999997, 2.1404821000000003, 2.5092960375,
34 | 2.6976679, 2.4191333499999996, 2.1371014, 1.9622389875, 1.8121170500000001, 1.7011511125, 1.6653431624999997, 1.6973457125, 1.7960343625, 1.9418083874999998, 2.11749155, 2.4192555625, 2.7465464624999996
35 | );
36 | sensor.gainMapSize = new int[] { 13, 10 };
37 | }
38 |
39 | // Dot-fix
40 | int w = 16;
41 | int h = 16;
42 |
43 | sensor.hotPixelsSize = new int[] { w, h };
44 | sensor.hotPixels = new short[w * h];
45 |
46 | sensor.hotPixels[2 * w + 14] = PLUS;
47 | sensor.hotPixels[6 * w + 14] = PLUS;
48 | sensor.hotPixels[10 * w + 6] = PLUS;
49 | sensor.hotPixels[14 * w + 6] = PLUS;
50 | }
51 |
52 | @Override
53 | public void processCorrection(SparseArray tags, ProcessParams process) {
54 | TIFFTag software = tags.get(TIFF.TAG_Software);
55 | if (software != null && software.toString().contains("HDR+")) {
56 | float[] saturationMap = process.saturationMap;
57 | saturationMap[0] *= 2.31f;
58 | saturationMap[1] *= 2.63f;
59 | saturationMap[2] *= 3.15f;
60 | saturationMap[3] *= 2.1f;
61 | saturationMap[4] *= 2f;
62 | saturationMap[5] *= 2f;
63 | saturationMap[6] *= 2.52f;
64 | saturationMap[7] *= 2.31f;
65 | } else {
66 | super.processCorrection(tags, process);
67 | }
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/device/OnePlus.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.device;
2 |
3 | import android.util.SparseArray;
4 |
5 | import amirz.dngprocessor.params.SensorParams;
6 | import amirz.dngprocessor.parser.TIFF;
7 | import amirz.dngprocessor.parser.TIFFTag;
8 |
9 | class OnePlus extends Generic {
10 | @Override
11 | public boolean isModel(String model) {
12 | return model.startsWith("ONEPLUS");
13 | }
14 |
15 | void matrixCorrection(SparseArray tags, SensorParams sensor) {
16 | TIFFTag software = tags.get(TIFF.TAG_Software);
17 | if (software != null && software.toString().startsWith("OnePlus")) {
18 | // Credits to Savitar for these values
19 | sensor.colorMatrix1 = new float[] {
20 | 1.0612f, -0.4169f, -0.1001f,
21 | -0.3982f, 1.2675f, 0.1412f,
22 | -0.0558f, 0.162f, 0.5206f
23 | };
24 |
25 | sensor.colorMatrix2 = new float[] {
26 | 1.2341f, -0.666f, 0.0994f,
27 | -0.2806f, 1.0683f, 0.2451f,
28 | 0.0127f, 0.0727f, 0.5789f
29 | };
30 |
31 | sensor.forwardTransform1 = new float[] {
32 | 0.4226f, 0.4079f, 0.1337f,
33 | 0.1871f, 0.7745f, 0.0384f,
34 | 0.0618f, 0.0047f, 0.7586f
35 | };
36 |
37 | sensor.forwardTransform2 = new float[] {
38 | 0.4187f, 0.4351f, 0.1105f,
39 | 0.1772f, 0.7902f, 0.0326f,
40 | 0.047f, 0.001f, 0.7772f
41 | };
42 | }
43 | }
44 |
45 | @Override
46 | void saturationCorrection(float[] saturationMap) {
47 | super.saturationCorrection(saturationMap);
48 | saturationMap[0] *= 1.3f;
49 | saturationMap[1] *= 1.25f;
50 | saturationMap[2] *= 1.5f;
51 | saturationMap[3] *= 1.6f;
52 | saturationMap[4] *= 1.55f;
53 | saturationMap[5] *= 1.5f;
54 | saturationMap[6] *= 1.45f;
55 | saturationMap[7] *= 1.25f;
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/device/OnePlus3.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.device;
2 |
3 | import android.util.SparseArray;
4 |
5 | import amirz.dngprocessor.params.SensorParams;
6 | import amirz.dngprocessor.parser.TIFF;
7 | import amirz.dngprocessor.parser.TIFFTag;
8 |
9 | public class OnePlus3 extends OnePlus {
10 | @Override
11 | public boolean isModel(String model) {
12 | return model.startsWith("ONEPLUS A3");
13 | }
14 |
15 | @Override
16 | public void sensorCorrection(SparseArray tags, SensorParams sensor) {
17 | super.sensorCorrection(tags, sensor);
18 | }
19 |
20 | @Override
21 | void saturationCorrection(float[] saturationMap) {
22 | super.saturationCorrection(saturationMap);
23 | saturationMap[0] *= 1.1f;
24 | saturationMap[2] *= 1.1f;
25 | saturationMap[7] *= 1.1f;
26 | }
27 |
28 | private boolean lowLight(SparseArray tags) {
29 | return exposureAtLeast(tags, 0.05f);
30 | }
31 |
32 | private boolean noLight(SparseArray tags) {
33 | return exposureAtLeast(tags, 0.1f);
34 | }
35 |
36 | private boolean exposureAtLeast(SparseArray tags, float min) {
37 | TIFFTag exposure = tags.get(TIFF.TAG_ExposureTime);
38 | return exposure != null && exposure.getRational().floatValue() >= min;
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/device/OnePlus5.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.device;
2 |
3 | import android.util.SparseArray;
4 |
5 | import amirz.dngprocessor.params.SensorParams;
6 | import amirz.dngprocessor.parser.TIFFTag;
7 |
8 | import static amirz.dngprocessor.util.Constants.HORZ;
9 | import static amirz.dngprocessor.util.Constants.PLUS;
10 | import static amirz.dngprocessor.util.Constants.VERT;
11 |
12 | public class OnePlus5 extends OnePlus {
13 | @Override
14 | public boolean isModel(String model) {
15 | return model.startsWith("ONEPLUS A5");
16 | }
17 |
18 | @Override
19 | public void sensorCorrection(SparseArray tags, SensorParams sensor) {
20 | super.sensorCorrection(tags, sensor);
21 | super.matrixCorrection(tags, sensor);
22 |
23 | // Dot-fix
24 | int w = 8;
25 | int h = 16;
26 |
27 | sensor.hotPixelsSize = new int[] { w, h };
28 | sensor.hotPixels = new short[w * h];
29 |
30 | sensor.hotPixels[6] = HORZ;
31 | sensor.hotPixels[w + 5] = VERT;
32 | sensor.hotPixels[w + 6] = PLUS;
33 | sensor.hotPixels[w + 7] = VERT;
34 | sensor.hotPixels[2 * w + 6] = HORZ;
35 |
36 | sensor.hotPixels[8 * w + 2] = HORZ;
37 | sensor.hotPixels[9 * w + 1] = VERT;
38 | sensor.hotPixels[9 * w + 2] = PLUS;
39 | sensor.hotPixels[9 * w + 3] = VERT;
40 | sensor.hotPixels[10 * w + 2] = HORZ;
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/device/OnePlus6.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.device;
2 |
3 | import android.util.SparseArray;
4 |
5 | import amirz.dngprocessor.params.SensorParams;
6 | import amirz.dngprocessor.parser.TIFFTag;
7 |
8 | public class OnePlus6 extends OnePlus {
9 | @Override
10 | public boolean isModel(String model) {
11 | return model.startsWith("ONEPLUS A6");
12 | }
13 |
14 | @Override
15 | public void sensorCorrection(SparseArray tags, SensorParams sensor) {
16 | super.sensorCorrection(tags, sensor);
17 | super.matrixCorrection(tags, sensor);
18 |
19 | if (sensor.gainMap == null) {
20 | // Extracted from a Camera2 photo
21 | sensor.gainMap = d2f(
22 | 3.3370361328125, 3.12744140625, 2.975341796875, 2.7078857421875, 2.4444580078125, 2.2271728515625, 2.0604248046875, 1.9720458984375, 1.9344482421875, 1.952880859375, 2.0250244140625, 2.18701171875, 2.384033203125, 2.6773681640625, 2.9281005859375, 3.1419677734375, 3.3548583984375,
23 | 3.2327880859375, 2.9757080078125, 2.70556640625, 2.38818359375, 2.115966796875, 1.906494140625, 1.76708984375, 1.6636962890625, 1.629150390625, 1.64453125, 1.727294921875, 1.8604736328125, 2.0703125, 2.33642578125, 2.6546630859375, 2.9547119140625, 3.19677734375,
24 | 3.1611328125, 2.826171875, 2.4580078125, 2.1396484375, 1.856201171875, 1.6614990234375, 1.5130615234375, 1.42919921875, 1.3994140625, 1.4173583984375, 1.48583984375, 1.613525390625, 1.8077392578125, 2.07373046875, 2.380126953125, 2.77294921875, 3.079833984375,
25 | 3.056884765625, 2.6405029296875, 2.2603759765625, 1.929443359375, 1.651611328125, 1.4732666015625, 1.3416748046875, 1.2669677734375, 1.2392578125, 1.2601318359375, 1.3236083984375, 1.4332275390625, 1.610595703125, 1.8687744140625, 2.1915283203125, 2.577392578125, 2.989013671875,
26 | 2.95849609375, 2.4810791015625, 2.0948486328125, 1.770751953125, 1.5208740234375, 1.34033203125, 1.2279052734375, 1.1512451171875, 1.120361328125, 1.1396484375, 1.212890625, 1.3182373046875, 1.481689453125, 1.7080078125, 2.0325927734375, 2.4141845703125, 2.845947265625,
27 | 2.858154296875, 2.3956298828125, 2.011962890625, 1.6895751953125, 1.440185546875, 1.2708740234375, 1.1510009765625, 1.07861328125, 1.0447998046875, 1.0655517578125, 1.1417236328125, 1.2569580078125, 1.4024658203125, 1.6314697265625, 1.9261474609375, 2.3203125, 2.760498046875,
28 | 2.81689453125, 2.3809814453125, 1.97705078125, 1.6605224609375, 1.4189453125, 1.251708984375, 1.1285400390625, 1.0570068359375, 1.021728515625, 1.041748046875, 1.1173095703125, 1.2333984375, 1.382568359375, 1.6031494140625, 1.906982421875, 2.2900390625, 2.7471923828125,
29 | 2.8741455078125, 2.4066162109375, 2.00830078125, 1.696044921875, 1.4451904296875, 1.2745361328125, 1.1553955078125, 1.085693359375, 1.04931640625, 1.07080078125, 1.1396484375, 1.2569580078125, 1.4093017578125, 1.6343994140625, 1.938232421875, 2.3172607421875, 2.7623291015625,
30 | 2.9769287109375, 2.4993896484375, 2.1123046875, 1.7891845703125, 1.5289306640625, 1.3480224609375, 1.2313232421875, 1.1572265625, 1.12841796875, 1.148681640625, 1.2152099609375, 1.3216552734375, 1.4813232421875, 1.7193603515625, 2.0325927734375, 2.4134521484375, 2.8692626953125,
31 | 3.0999755859375, 2.6397705078125, 2.253662109375, 1.93408203125, 1.6622314453125, 1.4727783203125, 1.3448486328125, 1.2716064453125, 1.2393798828125, 1.2608642578125, 1.32080078125, 1.4332275390625, 1.6151123046875, 1.8631591796875, 2.178466796875, 2.5755615234375, 3.00537109375,
32 | 3.2171630859375, 2.8292236328125, 2.4693603515625, 2.134521484375, 1.8638916015625, 1.6602783203125, 1.51123046875, 1.433349609375, 1.401611328125, 1.4202880859375, 1.4886474609375, 1.6136474609375, 1.8143310546875, 2.0699462890625, 2.407958984375, 2.7913818359375, 3.1488037109375,
33 | 3.2989501953125, 3.024169921875, 2.7403564453125, 2.4090576171875, 2.123291015625, 1.915283203125, 1.763427734375, 1.6651611328125, 1.6361083984375, 1.6492919921875, 1.732421875, 1.8704833984375, 2.06689453125, 2.3446044921875, 2.69384765625, 2.99951171875, 3.2501220703125,
34 | 3.3668212890625, 3.15380859375, 3.01904296875, 2.7325439453125, 2.470947265625, 2.230712890625, 2.08447265625, 1.996337890625, 1.9534912109375, 1.971435546875, 2.0645751953125, 2.2052001953125, 2.435302734375, 2.69970703125, 2.985595703125, 3.2374267578125, 3.4730224609375
35 | );
36 | sensor.gainMapSize = new int[] { 17, 13 };
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/device/Redmi.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.device;
2 |
3 | public class Redmi extends Xiaomi {
4 | @Override
5 | public boolean isModel(String model) {
6 | return model.startsWith("Redmi");
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/device/Xiaomi.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.device;
2 |
3 | public class Xiaomi extends Generic {
4 | @Override
5 | void saturationCorrection(float[] saturationMap) {
6 | super.saturationCorrection(saturationMap);
7 | float genericMult = 1.3f;
8 | saturationMap[0] *= genericMult;
9 | saturationMap[1] *= genericMult;
10 | saturationMap[2] *= genericMult;
11 | saturationMap[3] *= genericMult;
12 | saturationMap[4] *= genericMult;
13 | saturationMap[5] *= genericMult;
14 | saturationMap[6] *= genericMult;
15 | saturationMap[7] *= genericMult;
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/gl/GLCore.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.gl;
2 |
3 | import android.opengl.EGLConfig;
4 | import android.opengl.EGLContext;
5 | import android.opengl.EGLDisplay;
6 | import android.opengl.EGLSurface;
7 | import android.util.Log;
8 | import android.util.Pair;
9 |
10 | import java.util.ArrayList;
11 | import java.util.HashMap;
12 | import java.util.List;
13 | import java.util.Map;
14 | import java.util.function.Supplier;
15 |
16 | import static android.opengl.EGL14.*;
17 |
18 | /**
19 | * Since OpenGL is entirely static, this is a singleton wrapper.
20 | */
21 | public class GLCore {
22 | private static final String TAG = "GLCore";
23 |
24 | private static GLCore sInstance;
25 |
26 | public static GLCore getInstance() {
27 | if (sInstance == null) {
28 | sInstance = new GLCore();
29 | }
30 | return sInstance;
31 | }
32 |
33 | private final EGLDisplay mDisplay;
34 | private final EGLConfig mConfig;
35 | private final Map, EGLSurface> mSurfaces = new HashMap<>();
36 | private final Map, GLResource> mComponents = new HashMap<>();
37 | private EGLContext mContext;
38 | private EGLSurface mSurface;
39 | private Pair mDimens;
40 |
41 | public GLCore() {
42 | mDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
43 |
44 | int[] major = new int[2];
45 | int[] minor = new int[2];
46 | eglInitialize(mDisplay, major, 0, minor, 0);
47 |
48 | int[] attribList = {
49 | EGL_DEPTH_SIZE, 0,
50 | EGL_STENCIL_SIZE, 0,
51 | EGL_RED_SIZE, 8,
52 | EGL_GREEN_SIZE, 8,
53 | EGL_BLUE_SIZE, 8,
54 | EGL_ALPHA_SIZE, 8,
55 | EGL_BIND_TO_TEXTURE_RGBA, EGL_TRUE,
56 | EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
57 | EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
58 | EGL_NONE
59 | };
60 |
61 | // No error checking performed, minimum required code to elucidate logic
62 | // Expand on this logic to be more selective in choosing a configuration
63 | int[] numConfig = new int[1];
64 | if (!eglChooseConfig(mDisplay, attribList, 0,
65 | null, 0, 0, numConfig, 0)
66 | || numConfig[0] == 0) {
67 | throw new RuntimeException("OpenGL config count zero");
68 | }
69 |
70 | int configSize = numConfig[0];
71 | EGLConfig[] configs = new EGLConfig[configSize];
72 | if (!eglChooseConfig(mDisplay, attribList, 0,
73 | configs, 0, configSize, numConfig, 0)) {
74 | throw new RuntimeException("OpenGL config loading failed");
75 | }
76 |
77 | mConfig = configs[0];
78 | if (mConfig == null) {
79 | throw new RuntimeException("OpenGL config is null");
80 | }
81 | }
82 |
83 | public void setDimens(int width, int height) {
84 | if (mDimens != null && mDimens.first == width && mDimens.second == height) {
85 | Log.d(TAG, "Reusing full Context and Pbuffer Surface");
86 | } else {
87 | closeExistingContext();
88 |
89 | mDimens = new Pair<>(width, height);
90 | Log.d(TAG, "Reusing Pbuffer Surface: " + mSurfaces.containsKey(mDimens));
91 | mSurface = mSurfaces.computeIfAbsent(mDimens, x -> eglCreatePbufferSurface(
92 | mDisplay, mConfig, new int[] {
93 | EGL_WIDTH, x.first,
94 | EGL_HEIGHT, x.second,
95 | EGL_NONE
96 | }, 0));
97 |
98 | mContext = eglCreateContext(mDisplay, mConfig, EGL_NO_CONTEXT, new int[] {
99 | EGL_CONTEXT_CLIENT_VERSION, 3,
100 | EGL_NONE
101 | }, 0);
102 | }
103 |
104 | eglMakeCurrent(mDisplay, mSurface, mSurface, mContext);
105 | }
106 |
107 | private void closeExistingContext() {
108 | if (mContext != null) {
109 | Log.d(TAG, "Closing current context");
110 | for (GLResource resource : mComponents.values()) {
111 | resource.release();
112 | }
113 |
114 | eglMakeCurrent(mDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
115 | eglDestroyContext(mDisplay, mContext);
116 | mContext = null;
117 | }
118 | }
119 |
120 | @SuppressWarnings("unchecked")
121 | public T getComponent(Class cls, Supplier constructor) {
122 | return (T) mComponents.computeIfAbsent(cls, x -> constructor.get());
123 | }
124 | }
125 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/gl/GLResource.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.gl;
2 |
/**
 * Base class for singleton GL components registered with GLCore; release()
 * is invoked for every registered component before the EGL context is
 * destroyed (see GLCore.closeExistingContext).
 */
public abstract class GLResource {
    /**
     * Release static OpenGL resources.
     */
    public abstract void release();
}
9 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/gl/SquareModel.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.gl;
2 |
3 | import java.nio.ByteBuffer;
4 | import java.nio.ByteOrder;
5 | import java.nio.FloatBuffer;
6 |
7 | import static android.opengl.GLES20.*;
8 |
9 | class SquareModel {
10 | private static final int COORDS_PER_VERTEX = 3;
11 | private static final float[] COORDS = {
12 | -1, 1, 0,
13 | -1, -1, 0,
14 | 1, 1, 0,
15 | 1, -1, 0
16 | };
17 | private static final int STRIDE = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
18 |
19 | private final FloatBuffer mVertexBuffer;
20 |
21 | SquareModel() {
22 | // (# of coordinate values * 4 bytes per float)
23 | ByteBuffer bb = ByteBuffer.allocateDirect(COORDS.length * 4);
24 |
25 | bb.order(ByteOrder.nativeOrder());
26 | mVertexBuffer = bb.asFloatBuffer();
27 | mVertexBuffer.put(COORDS);
28 | mVertexBuffer.position(0);
29 | }
30 |
31 | void draw(int posHandle) {
32 | glEnableVertexAttribArray(posHandle);
33 | glVertexAttribPointer(
34 | posHandle, COORDS_PER_VERTEX,
35 | GL_FLOAT, false,
36 | STRIDE, mVertexBuffer);
37 | glDrawArrays(GL_TRIANGLE_STRIP, 0, COORDS.length / 3);
38 | glDisableVertexAttribArray(posHandle);
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/gl/TexturePool.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.gl;
2 |
3 | import android.util.Log;
4 |
5 | import java.util.HashSet;
6 | import java.util.Set;
7 |
8 | public class TexturePool extends GLResource {
9 | private static final String TAG = "TexturePool";
10 |
11 | public static TexturePool getInstance() {
12 | return GLCore.getInstance().getComponent(TexturePool.class, TexturePool::new);
13 | }
14 |
15 | public static Texture get(int width, int height, int channels, Texture.Format format) {
16 | return getInstance().getTex(width, height, channels, format);
17 | }
18 |
19 | public static Texture get(Texture texture) {
20 | return get(texture.getWidth(), texture.getHeight(), texture.getChannels(),
21 | texture.getFormat());
22 | }
23 |
24 | private final Set mPool = new HashSet<>();
25 | private final Set mGrants = new HashSet<>();
26 |
27 | private Texture getTex(int width, int height, int channels, Texture.Format format) {
28 | Texture texture = null;
29 | for (Texture tex : mPool) {
30 | if (tex.getWidth() == width && tex.getHeight() == height
31 | && tex.getChannels() == channels && tex.getFormat() == format) {
32 | mPool.remove(tex);
33 | texture = tex;
34 | break;
35 | }
36 | }
37 |
38 | if (texture == null) {
39 | texture = new Texture(width, height, channels, format, null);
40 | Log.d(TAG, "Created " + texture + ": " + width + "x" + height + " (" + channels + " ch)");
41 | }
42 |
43 | Texture tex = texture;
44 | mGrants.add(tex);
45 | tex.setCloseOverride(() -> {
46 | mGrants.remove(tex);
47 | mPool.add(tex);
48 | tex.setCloseOverride(() -> {
49 | throw new RuntimeException("Attempting to close " + tex + " twice");
50 | });
51 | });
52 |
53 | return tex;
54 | }
55 |
56 | @Override
57 | public void release() {
58 | for (Texture texture : mPool) {
59 | texture.setCloseOverride(null);
60 | texture.close();
61 | }
62 | mPool.clear();
63 | }
64 |
65 | public static void logLeaks() {
66 | for (Texture tex : getInstance().mGrants) {
67 | Log.d(TAG, "Leaked texture: " + tex);
68 | }
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/math/BlockDivider.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.math;
2 |
/**
 * Splits a 1-D range of {@code size} elements into consecutive blocks of at
 * most {@code block} elements; the final block may be smaller.
 */
public class BlockDivider {
    private final int mSize;
    private final int mBlock;
    private int mPassed;

    public BlockDivider(int size, int block) {
        mSize = size;
        mBlock = block;
    }

    /**
     * Writes the next block's start position into out[0] and its length into
     * out[1]. Returns false once the whole range has been handed out.
     */
    public boolean nextBlock(int[] out) {
        if (mPassed >= mSize) {
            return false;
        }
        out[0] = mPassed;
        out[1] = Math.min(mBlock, mSize - mPassed);
        mPassed += out[1];
        return true;
    }
}
29 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/math/Convolve.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.math;
2 |
/**
 * 1-D convolution with replicate (clamp-to-edge) padding on both sides.
 */
public class Convolve {
    /**
     * Convolves {@code in} with {@code conv}. With {@code cut} set, the
     * leading conv.length - 1 samples are dropped so the result has the same
     * length as the input; otherwise the full in.length + conv.length - 1
     * result is returned.
     */
    public static float[] conv(float[] in, float[] conv, boolean cut) {
        final int pad = conv.length - 1;
        final float[] out = new float[in.length + pad];
        final float[] padded = new float[out.length + pad];

        // Centre: the input itself.
        System.arraycopy(in, 0, padded, pad, in.length);
        // Left edge: replicate the first sample.
        for (int i = 0; i < pad; i++) {
            padded[i] = in[0];
        }
        // Right edge: replicate the last sample.
        for (int i = out.length; i < padded.length; i++) {
            padded[i] = in[in.length - 1];
        }

        for (int i = 0; i < out.length; i++) {
            float acc = 0f;
            for (int j = 0; j < conv.length; j++) {
                acc += padded[pad + i - j] * conv[j];
            }
            out[i] = acc;
        }

        if (!cut) {
            return out;
        }
        float[] trimmed = new float[in.length];
        System.arraycopy(out, pad, trimmed, 0, trimmed.length);
        return trimmed;
    }
}
29 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/math/Histogram.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.math;
2 |
/**
 * One-pass statistics over an interleaved 4-channel float image: per-channel
 * means, a log-average of the 4th channel, a gamma estimate, and a smoothed,
 * shadow-crushed cumulative histogram used as an equalization curve.
 */
public class Histogram {
    private static final int HIST_BINS = 256;
    private static final double EPSILON = 0.01;
    private static final float LINEARIZE_PERCEPTION = 2.4f;

    // Mean of the first three channels, normalized by whPixels.
    public final float[] sigma = new float[3];
    // Normalized cumulative histogram of the 4th channel (length HIST_BINS + 1).
    public final float[] hist;
    // Scaled average exponent mapping bin position to cumulative value.
    public final float gamma;
    // exp(mean(log(v + EPSILON))) over the 4th channel of every pixel.
    public final float logAvgLuminance;

    /**
     * @param f interleaved buffer, 4 floats per pixel; the 4th float is the
     *          one binned and log-averaged (presumably luminance — confirm
     *          against the producing shader).
     * @param whPixels pixel count used to normalize the channel sums.
     */
    public Histogram(float[] f, int whPixels) {
        int[] histv = new int[HIST_BINS];

        double logTotalLuminance = 0d;
        // Loop over all values
        for (int i = 0; i < f.length; i += 4) {
            for (int j = 0; j < 3; j++) {
                sigma[j] += f[i + j];
            }

            // Clamp the bin index to [0, HIST_BINS - 1].
            int bin = (int) (f[i + 3] * HIST_BINS);
            if (bin < 0) bin = 0;
            if (bin >= HIST_BINS) bin = HIST_BINS - 1;
            histv[bin]++;

            logTotalLuminance += Math.log(f[i + 3] + EPSILON);
        }

        // f.length / 4 pixels, hence the * 4 / f.length mean.
        logAvgLuminance = (float) Math.exp(logTotalLuminance * 4 / f.length);
        for (int j = 0; j < 3; j++) {
            sigma[j] /= whPixels;
        }

        //limitHighlightContrast(histv, f.length / 4);
        float[] cumulativeHist = buildCumulativeHist(histv);

        // Find gamma: Inverse of the average exponent.
        gamma = findGamma(cumulativeHist);

        // Compensate for the gamma being applied first.
        for (int i = 1; i <= HIST_BINS; i++) {
            double id = (double) i / HIST_BINS;
            cumulativeHist[i] *= id / Math.pow(id, gamma);
        }

        // Limit contrast and banding.
        // Repeated neighbor-averaging passes over a shrinking tail [i, end),
        // endpoints pinned; tmp and cumulativeHist swap roles each pass.
        // Order-critical: do not restructure.
        float[] tmp = new float[cumulativeHist.length];
        for (int i = cumulativeHist.length - 1; i > 0; i--) {
            System.arraycopy(cumulativeHist, 0, tmp, 0, i);
            for (int j = i; j < cumulativeHist.length - 1; j++) {
                tmp[j] = (cumulativeHist[j - 1] + cumulativeHist[j + 1]) * 0.5f;
            }
            tmp[tmp.length - 1] = cumulativeHist[cumulativeHist.length - 1];

            float[] swp = tmp;
            tmp = cumulativeHist;
            cumulativeHist = swp;
        }

        // Crush shadows.
        crushShadows(cumulativeHist);

        hist = cumulativeHist;
    }

    // Integrates the histogram into HIST_BINS + 1 entries and normalizes so
    // the final entry is 1.
    private static float[] buildCumulativeHist(int[] hist) {
        float[] cumulativeHist = new float[HIST_BINS + 1];
        for (int i = 1; i < cumulativeHist.length; i++) {
            cumulativeHist[i] = cumulativeHist[i - 1] + hist[i - 1];
        }
        float max = cumulativeHist[HIST_BINS];
        for (int i = 0; i < cumulativeHist.length; i++) {
            cumulativeHist[i] /= max;
        }
        return cumulativeHist;
    }

    // Averages the exponent p solving cumulativeHist[i] = (i / HIST_BINS)^p
    // over bins with a meaningful value, scaled by LINEARIZE_PERCEPTION.
    // NOTE(review): returns NaN if no bin qualifies (exponentCounted == 0).
    private static float findGamma(float[] cumulativeHist) {
        float sumExponent = 0.f;
        int exponentCounted = 0;
        for (int i = 0; i <= HIST_BINS; i++) {
            float val = cumulativeHist[i];
            if (val > 0.001f) {
                // Which power of the input is the output.
                double exponent = Math.log(cumulativeHist[i]) / Math.log((double) i / HIST_BINS);
                if (exponent > 0f && exponent < 10f) {
                    sumExponent += exponent;
                    exponentCounted++;
                }
            }
        }
        return LINEARIZE_PERCEPTION * sumExponent / exponentCounted;
    }

    // Attenuates the lowest ~2% of the curve with a cubic ramp toward zero.
    private static void crushShadows(float[] cumulativeHist) {
        for (int i = 0; i < cumulativeHist.length; i++) {
            float og = (float) i / cumulativeHist.length;
            float a = Math.min(1f, og / 0.02f);
            if (a == 1f) {
                break;
            }
            cumulativeHist[i] *= Math.pow(a, 3.f);
        }
    }

    // Shift highlights down
    // NOTE(review): currently unused — its only call site above is commented out.
    private static void limitHighlightContrast(int[] clippedHist, int valueCount) {
        for (int i = clippedHist.length - 1; i >= clippedHist.length / 4; i--) {
            int limit = 4 * valueCount / i;

            if (clippedHist[i] > limit) {
                int removed = clippedHist[i] - limit;
                clippedHist[i] = limit;

                // Redistribute the clipped count into lower bins with headroom.
                for (int j = i - 1; j >= 0; j--) {
                    int space = limit - clippedHist[j];
                    if (space > 0) {
                        int allocate = Math.min(removed, space);
                        clippedHist[j] += allocate;
                        removed -= allocate;
                        if (removed == 0) {
                            break;
                        }
                    }
                }
            }
        }
    }
}
132 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/params/ProcessParams.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.params;
2 |
3 | import amirz.dngprocessor.Preferences;
4 |
5 | public class ProcessParams {
6 | public static ProcessParams getPreset(Preferences.PostProcessMode mode) {
7 | ProcessParams process = new ProcessParams();
8 | switch (mode) {
9 | case Disabled:
10 | process.sharpenFactor = 0f;
11 | process.histFactor = 0f;
12 | process.adaptiveSaturation = new float[] { 0f, 1f };
13 | break;
14 | case Natural:
15 | process.sharpenFactor = 0.25f;
16 | process.histFactor = 0.6f;
17 | process.adaptiveSaturation = new float[] { 2.5f, 4f };
18 | break;
19 | case Boosted:
20 | process.sharpenFactor = 0.35f;
21 | process.histFactor = 1f;
22 | process.adaptiveSaturation = new float[] { 3f, 2f };
23 | break;
24 | }
25 | return process;
26 | }
27 |
28 | public float sharpenFactor;
29 | public float histFactor;
30 | public float histCurve;
31 |
32 | public int denoiseFactor;
33 | public float[] saturationMap;
34 | public float satLimit;
35 | public float[] adaptiveSaturation;
36 | public boolean exposeFuse;
37 | public boolean lce;
38 | public boolean ahe;
39 |
40 | private ProcessParams() {
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/params/SensorParams.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.params;
2 |
3 | import android.util.Rational;
4 |
/**
 * Plain data holder for per-image sensor characteristics; the fields mirror
 * DNG/TIFF tag values filled in by the parser and adjusted by the device
 * profiles (see DeviceMap / OnePlus6.sensorCorrection for gainMap).
 */
public class SensorParams {
    // Raw input dimensions and row stride.
    public int inputWidth;
    public int inputHeight;
    public int inputStride;
    // CFA arrangement; presumably a CameraCharacteristics
    // SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_* constant (see CFAPattern.get) —
    // confirm against the pipeline.
    public int cfa;
    // Raw 2x2 CFA pattern values.
    public byte[] cfaVal;
    public int[] blackLevelPattern;
    public int whiteLevel;
    public int referenceIlluminant1;
    public int referenceIlluminant2;
    // Per-illuminant calibration / color / forward matrices (DNG pairs).
    public float[] calibrationTransform1;
    public float[] calibrationTransform2;
    public float[] colorMatrix1;
    public float[] colorMatrix2;
    public float[] forwardTransform1;
    public float[] forwardTransform2;
    public float[] neutralColorPoint;
    public float[] noiseProfile;
    public int outputOffsetX;
    public int outputOffsetY;
    // Lens-shading gain map and its { columns, rows } size.
    public float[] gainMap;
    public int[] gainMapSize;
    // Hot-pixel data; defaults are a harmless 1x1 placeholder.
    public short[] hotPixels = new short[1];
    public int[] hotPixelsSize = new int[] { 1, 1 };
}
30 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/parser/ByteReader.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.parser;
2 |
3 | import android.content.Context;
4 | import android.net.Uri;
5 | import android.support.media.ExifInterface;
6 |
7 | import java.io.ByteArrayInputStream;
8 | import java.io.ByteArrayOutputStream;
9 | import java.io.IOException;
10 | import java.io.InputStream;
11 | import java.nio.ByteBuffer;
12 | import java.nio.ByteOrder;
13 |
14 | public class ByteReader {
15 | public static class ReaderWithExif {
16 | public final ExifInterface exif;
17 | public final ByteBuffer wrap;
18 | public final int length;
19 |
20 | private ReaderWithExif(ExifInterface exif, byte[] bytes) {
21 | this.exif = exif;
22 | length = bytes.length;
23 | wrap = wrap(bytes);
24 | }
25 | }
26 |
27 | public static ReaderWithExif fromUri(Context context, Uri uri) {
28 | byte[] bytes = null;
29 | try (InputStream stream = context.getContentResolver().openInputStream(uri)) {
30 | if (stream != null) {
31 | bytes = fromStream(stream);
32 | }
33 | } catch (IOException e) {
34 | e.printStackTrace();
35 | }
36 |
37 | if (bytes != null) {
38 | ExifInterface exif = null;
39 | try (ByteArrayInputStream stream = new ByteArrayInputStream(bytes)) {
40 | exif = new ExifInterface(stream);
41 | } catch (IOException e) {
42 | e.printStackTrace();
43 | }
44 |
45 | if (exif != null) {
46 | return new ReaderWithExif(exif, bytes);
47 | }
48 | }
49 | return null;
50 | }
51 |
52 | private static byte[] fromStream(InputStream inputStream) throws IOException {
53 | ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
54 | int bufferSize = 1024;
55 | byte[] buffer = new byte[bufferSize];
56 |
57 | int len;
58 | while ((len = inputStream.read(buffer)) != -1) {
59 | byteBuffer.write(buffer, 0, len);
60 | }
61 | return byteBuffer.toByteArray();
62 | }
63 |
64 | public static ByteBuffer wrap(byte[] b) {
65 | return ByteBuffer.wrap(b).order(ByteOrder.LITTLE_ENDIAN);
66 | }
67 |
68 | public static ByteBuffer wrapBigEndian(byte[] b) {
69 | return ByteBuffer.wrap(b).order(ByteOrder.BIG_ENDIAN);
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/parser/CFAPattern.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.parser;
2 |
3 | import android.hardware.camera2.CameraCharacteristics;
4 |
5 | import java.util.Arrays;
6 | import java.util.HashMap;
7 | import java.util.Map;
8 |
9 | public class CFAPattern {
10 | private static final Map PATTERNS = new HashMap<>();
11 |
12 | static {
13 | PATTERNS.put(new byte[] { 0, 1, 1, 2 },
14 | CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB);
15 |
16 | PATTERNS.put(new byte[] { 1, 0, 2, 1 },
17 | CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG);
18 |
19 | PATTERNS.put(new byte[] { 1, 2, 0, 1 },
20 | CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG);
21 |
22 | PATTERNS.put(new byte[] { 2, 1, 1, 0 },
23 | CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR);
24 | }
25 |
26 | public static int get(byte[] cfaValues) {
27 | for (Map.Entry kvp : PATTERNS.entrySet()) {
28 | if (Arrays.equals(kvp.getKey(), cfaValues)) {
29 | return kvp.getValue();
30 | }
31 | }
32 | return -1;
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/parser/OpParser.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.parser;
2 |
3 | import android.util.Log;
4 |
5 | import java.nio.ByteBuffer;
6 |
/**
 * Parser for a DNG opcode list blob (the payload of the OpcodeList TIFF
 * tags); currently only the GainMap opcode (id 9) is decoded.
 */
public class OpParser {
    private static final String TAG = "OpParser";

    /** Parsed form of a GainMap opcode. */
    public static class GainMap {
        // Active region of the map, in sensor coordinates.
        public int top;
        public int left;
        public int bottom;
        public int right;
        public int plane;
        public int planes;
        public int rowPitch;
        public int colPitch;
        // Grid dimensions: V rows by H columns of gain samples.
        public int mapPointsV;
        public int mapPointsH;
        public double mapSpacingV;
        public double mapSpacingH;
        public double mapOriginV;
        public double mapOriginH;
        public int mapPlanes;
        // Gain values, indexed as px[x * mapPointsV + y] (see parseGainMap).
        public float[] px;
    }

    /**
     * Reads one GainMap body from {@code reader}.
     * Fields are read in the exact on-disk order; do not reorder these calls.
     */
    private static GainMap parseGainMap(ByteBuffer reader) {
        GainMap map = new GainMap();
        map.top = reader.getInt();
        map.left = reader.getInt();
        map.bottom = reader.getInt();
        map.right = reader.getInt();
        map.plane = reader.getInt();
        map.planes = reader.getInt();
        map.rowPitch = reader.getInt();
        map.colPitch = reader.getInt();
        map.mapPointsV = reader.getInt();
        map.mapPointsH = reader.getInt();
        map.mapSpacingV = reader.getDouble();
        map.mapSpacingH = reader.getDouble();
        map.mapOriginV = reader.getDouble();
        map.mapOriginH = reader.getDouble();
        map.mapPlanes = reader.getInt();
        map.px = new float[map.mapPointsH * map.mapPointsV];

        if (map.mapPlanes != 1) {
            throw new IllegalArgumentException("GainMap.mapPlanes can only be 1");
        }

        // Samples are stored row-major on disk (V fastest here), written into
        // px at x * mapPointsV + y.
        for (int x = 0; x < map.mapPointsH; x++) {
            for (int y = 0; y < map.mapPointsV; y++) {
                map.px[x * map.mapPointsV + y] = reader.getFloat();
            }
        }

        return map;
    }

    /**
     * Parses the full opcode list: a big-endian count, then for each opcode an
     * id, a 4-byte version, flags, and the byte size of its body. Unsupported
     * opcodes leave a null slot and are skipped by seeking past their body.
     * Returns one entry per opcode.
     */
    public static Object[] parseAll(byte[] input) {
        ByteBuffer reader = ByteReader.wrapBigEndian(input);

        Object[] ops = new Object[reader.getInt()];
        for (int i = 0; i < ops.length; i++) {
            int id = reader.getInt();
            // ver and flags are unused, but reading them advances the buffer.
            byte[] ver = { reader.get(), reader.get(), reader.get(), reader.get() };
            int flags = reader.getInt();
            int size = reader.getInt();
            Log.d(TAG, "OpCode " + id + " with size " + size);

            if (id == 9) {
                ops[i] = parseGainMap(reader);
            } else {
                reader.position(reader.position() + size);
            }
        }
        return ops;
    }
}
81 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/parser/TIFF.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.parser;
2 |
3 | import android.util.SparseIntArray;
4 |
/**
 * TIFF/DNG tag identifiers and field-type constants used by the parser.
 * Tag numbers follow the TIFF 6.0 and DNG specifications.
 */
public class TIFF {
    // Baseline TIFF / EXIF tags.
    public static final int TAG_NewSubfileType = 254;
    public static final int TAG_ImageWidth = 256;
    public static final int TAG_ImageLength = 257;
    public static final int TAG_BitsPerSample = 258;
    public static final int TAG_Compression = 259;
    public static final int TAG_PhotometricInterpretation = 262;
    public static final int TAG_ImageDescription = 270;
    public static final int TAG_Make = 271;
    public static final int TAG_Model = 272;
    public static final int TAG_StripOffsets = 273;
    public static final int TAG_Orientation = 274;
    public static final int TAG_SamplesPerPixel = 277;
    public static final int TAG_RowsPerStrip = 278;
    public static final int TAG_StripByteCounts = 279;
    public static final int TAG_XResolution = 282;
    public static final int TAG_YResolution = 283;
    public static final int TAG_PlanarConfiguration = 284;
    public static final int TAG_ResolutionUnit = 296;
    public static final int TAG_Software = 305;
    // NOTE(review): tag 306 is DateTime in the TIFF 6.0 spec; the name is
    // kept as-is for existing callers.
    public static final int TAG_Hardware = 306;
    public static final int TAG_SubIFDs = 330;
    public static final int TAG_CFARepeatPatternDim = 33421;
    public static final int TAG_CFAPattern = 33422;
    public static final int TAG_Copyright = 33432;
    public static final int TAG_ExposureTime = 33434;
    public static final int TAG_FNumber = 33437;
    public static final int TAG_ISOSpeedRatings = 34855;
    public static final int TAG_DateTimeOriginal = 36867;
    public static final int TAG_FocalLength = 37386;
    public static final int TAG_EPStandardID = 37398;
    // DNG-specific tags (50706+).
    public static final int TAG_DNGVersion = 50706;
    public static final int TAG_DNGBackwardVersion = 50707;
    public static final int TAG_UniqueCameraModel = 50708;
    public static final int TAG_CFAPlaneColor = 50710;
    public static final int TAG_CFALayout = 50711;
    public static final int TAG_BlackLevelRepeatDim = 50713;
    public static final int TAG_BlackLevel = 50714;
    public static final int TAG_WhiteLevel = 50717;
    public static final int TAG_DefaultScale = 50718;
    public static final int TAG_DefaultCropOrigin = 50719;
    public static final int TAG_DefaultCropSize = 50720;
    public static final int TAG_ColorMatrix1 = 50721;
    public static final int TAG_ColorMatrix2 = 50722;
    public static final int TAG_CameraCalibration1 = 50723;
    public static final int TAG_CameraCalibration2 = 50724;
    public static final int TAG_AsShotNeutral = 50728;
    public static final int TAG_CalibrationIlluminant1 = 50778;
    public static final int TAG_CalibrationIlluminant2 = 50779;
    public static final int TAG_ActiveArea = 50829;
    public static final int TAG_ForwardMatrix1 = 50964;
    public static final int TAG_ForwardMatrix2 = 50965;
    public static final int TAG_OpcodeList2 = 51009;
    public static final int TAG_OpcodeList3 = 51022;
    public static final int TAG_NoiseProfile = 51041;

    // TIFF field-type identifiers (the IFD entry "Type" values).
    public static final int TYPE_Byte = 1;
    public static final int TYPE_String = 2;
    public static final int TYPE_UInt_16 = 3;
    public static final int TYPE_UInt_32 = 4;
    public static final int TYPE_UFrac = 5;
    public static final int TYPE_Undef = 7;
    public static final int TYPE_Frac = 10;
    public static final int TYPE_Double = 12;

    // Size in bytes of a single element of each field type.
    public static final SparseIntArray TYPE_SIZES = new SparseIntArray();

    static {
        TYPE_SIZES.append(TYPE_Byte, 1);
        TYPE_SIZES.append(TYPE_String, 1);
        TYPE_SIZES.append(TYPE_UInt_16, 2);
        TYPE_SIZES.append(TYPE_UInt_32, 4);
        TYPE_SIZES.append(TYPE_UFrac, 8);
        TYPE_SIZES.append(TYPE_Undef, 1);
        TYPE_SIZES.append(TYPE_Frac, 8);
        TYPE_SIZES.append(TYPE_Double, 8);
    }
}
83 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/parser/TIFFTag.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.parser;
2 |
3 | import android.util.Rational;
4 |
5 | public class TIFFTag {
6 | private final int mType;
7 | private final Object[] mValues;
8 |
9 | public TIFFTag(int type, Object[] value) {
10 | mType = type;
11 | mValues = value;
12 | }
13 |
14 | private TIFFTag() {
15 | mType = 0;
16 | mValues = null;
17 | }
18 |
19 | protected Object[] getValues() {
20 | return mValues;
21 | }
22 |
23 | public int getInt() {
24 | return (int) getValues()[0];
25 | }
26 |
27 | public float getFloat() {
28 | return getRational().floatValue();
29 | }
30 |
31 | public Rational getRational() {
32 | return (Rational) getValues()[0];
33 | }
34 |
35 | public byte[] getByteArray() {
36 | Object[] values = getValues();
37 | byte[] ints = new byte[values.length];
38 | for (int i = 0; i < ints.length; i++) {
39 | if (mType == TIFF.TYPE_Byte || mType == TIFF.TYPE_Undef) {
40 | ints[i] = (byte) values[i];
41 | }
42 | }
43 | return ints;
44 | }
45 |
46 | public int[] getIntArray() {
47 | Object[] values = getValues();
48 | int[] ints = new int[values.length];
49 | for (int i = 0; i < ints.length; i++) {
50 | if (mType == TIFF.TYPE_Byte || mType == TIFF.TYPE_Undef) {
51 | ints[i] = (byte) values[i] & 0xFF;
52 | } else if (mType == TIFF.TYPE_UInt_16 || mType == TIFF.TYPE_UInt_32) {
53 | ints[i] = (int) values[i];
54 | } else if (mType == TIFF.TYPE_Frac || mType == TIFF.TYPE_UFrac) {
55 | ints[i] = (int)((Rational) values[i]).floatValue();
56 | }
57 | }
58 | return ints;
59 | }
60 |
61 | public float[] getFloatArray() {
62 | Object[] values = getValues();
63 | float[] floats = new float[values.length];
64 | for (int i = 0; i < floats.length; i++) {
65 | if (mType == TIFF.TYPE_Frac || mType == TIFF.TYPE_UFrac) {
66 | floats[i] = ((Rational) values[i]).floatValue();
67 | } else if (mType == TIFF.TYPE_Double) {
68 | floats[i] = ((Double) values[i]).floatValue();
69 | }
70 | }
71 | return floats;
72 | }
73 |
74 | public Rational[] getRationalArray() {
75 | Object[] values = getValues();
76 | Rational[] rationals = new Rational[values.length];
77 | for (int i = 0; i < rationals.length; i++) {
78 | if (mType == TIFF.TYPE_Frac || mType == TIFF.TYPE_UFrac) {
79 | rationals[i] = (Rational) values[i];
80 | }
81 | }
82 | return rationals;
83 | }
84 |
85 | @Override
86 | public String toString() {
87 | Object[] values = getValues();
88 | StringBuilder buffer = new StringBuilder();
89 | if (mType == TIFF.TYPE_String) {
90 | for (Object b : values) {
91 | buffer.append((char) b);
92 | }
93 | } else {
94 | for (int elementNum = 0; elementNum < values.length && elementNum < 20; elementNum++) {
95 | Object element = values[elementNum];
96 | if (element != null) {
97 | buffer.append(element.toString()).append(" ");
98 | }
99 | }
100 | }
101 | return buffer.toString();
102 | }
103 |
104 | static TIFFTag exceptionWrapper(int id) {
105 | return new TIFFTag() {
106 | @Override
107 | protected Object[] getValues() {
108 | throw new TIFFTagException("TIFF tag " + id + " not found");
109 | }
110 | };
111 | }
112 |
113 | public static class TIFFTagException extends RuntimeException {
114 | private TIFFTagException(String s) {
115 | super(s);
116 | }
117 | }
118 | }
119 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/parser/TagParser.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.parser;
2 |
3 | import android.util.Rational;
4 | import android.util.SparseArray;
5 |
6 | import java.nio.ByteBuffer;
7 | import java.nio.ByteOrder;
8 |
9 | class TagParser {
10 | static SparseArray parse(ByteBuffer wrap) {
11 | short tagCount = wrap.getShort();
12 | SparseArray tags = new SparseArray<>(tagCount);
13 |
14 | for (int tagNum = 0; tagNum < tagCount; tagNum++) {
15 | int tag = wrap.getShort() & 0xFFFF;
16 | int type = wrap.getShort() & 0xFFFF;
17 | int elementCount = wrap.getInt();
18 | int elementSize = TIFF.TYPE_SIZES.get(type);
19 |
20 | byte[] buffer = new byte[Math.max(4, elementCount * elementSize)];
21 | if (buffer.length == 4) {
22 | wrap.get(buffer);
23 | } else {
24 | int dataPos = wrap.getInt();
25 | independentMove(wrap, dataPos).get(buffer);
26 | }
27 |
28 | ByteBuffer valueWrap = ByteReader.wrap(buffer);
29 | Object[] values = new Object[elementCount];
30 | for (int elementNum = 0; elementNum < elementCount; elementNum++) {
31 | if (type == TIFF.TYPE_Byte || type == TIFF.TYPE_Undef) {
32 | values[elementNum] = valueWrap.get();
33 | } else if (type == TIFF.TYPE_String) {
34 | values[elementNum] = (char) valueWrap.get();
35 | } else if (type == TIFF.TYPE_UInt_16) {
36 | values[elementNum] = valueWrap.getShort() & 0xFFFF;
37 | } else if (type == TIFF.TYPE_UInt_32) {
38 | values[elementNum] = valueWrap.getInt();
39 | } else if (type == TIFF.TYPE_UFrac) {
40 | values[elementNum] = new Rational(valueWrap.getInt(), valueWrap.getInt());
41 | } else if (type == TIFF.TYPE_Frac) {
42 | values[elementNum] = new Rational(valueWrap.getInt(), valueWrap.getInt());
43 | } else if (type == TIFF.TYPE_Double) {
44 | values[elementNum] = valueWrap.getDouble();
45 | }
46 | }
47 |
48 | tags.append(tag, new TIFFTag(type, values));
49 | }
50 |
51 | return tags;
52 | }
53 |
54 | private static ByteBuffer independentMove(ByteBuffer wrap, int position) {
55 | return (ByteBuffer) wrap.duplicate().order(ByteOrder.LITTLE_ENDIAN).position(position);
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/GLBlockProcessing.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline;
2 |
3 | import android.graphics.Bitmap;
4 |
5 | import java.nio.IntBuffer;
6 |
7 | import amirz.dngprocessor.gl.GLPrograms;
8 | import amirz.dngprocessor.math.BlockDivider;
9 |
10 | import static amirz.dngprocessor.util.Constants.BLOCK_HEIGHT;
11 | import static android.opengl.GLES20.*;
12 |
13 | public class GLBlockProcessing {
14 | private final Bitmap mOut;
15 | private final int mOutWidth, mOutHeight;
16 | private final IntBuffer mBlockBuffer;
17 | private final IntBuffer mOutBuffer;
18 |
19 | public GLBlockProcessing(Bitmap out) {
20 | mOut = out;
21 | mOutWidth = out.getWidth();
22 | mOutHeight = out.getHeight();
23 |
24 | mBlockBuffer = IntBuffer.allocate(mOutWidth * BLOCK_HEIGHT);
25 | mOutBuffer = IntBuffer.allocate(mOutWidth * mOutHeight);
26 | }
27 |
28 | public void drawBlocksToOutput(GLPrograms gl) {
29 | BlockDivider divider = new BlockDivider(mOutHeight, BLOCK_HEIGHT);
30 | int[] row = new int[2];
31 | while (divider.nextBlock(row)) {
32 | int y = row[0];
33 | int height = row[1];
34 |
35 | glViewport(0, 0, mOutWidth, height);
36 | gl.seti("yOffset", y);
37 | gl.draw();
38 |
39 | mBlockBuffer.position(0);
40 | glReadPixels(0, 0, mOutWidth, height, GL_RGBA, GL_UNSIGNED_BYTE, mBlockBuffer);
41 | if (height < BLOCK_HEIGHT) {
42 | // This can only happen once
43 | int[] data = new int[mOutWidth * height];
44 | mBlockBuffer.get(data);
45 | mOutBuffer.put(data);
46 | } else {
47 | mOutBuffer.put(mBlockBuffer);
48 | }
49 | }
50 |
51 | mOutBuffer.position(0);
52 | mOut.copyPixelsFromBuffer(mOutBuffer);
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/Stage.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline;
2 |
3 | import amirz.dngprocessor.gl.GLPrograms;
4 | import amirz.dngprocessor.params.ProcessParams;
5 | import amirz.dngprocessor.params.SensorParams;
6 |
7 | public abstract class Stage implements AutoCloseable {
8 | private GLPrograms mConverter;
9 | private SensorParams mSensor;
10 | private ProcessParams mProcess;
11 |
12 | public void init(GLPrograms converter, SensorParams sensor, ProcessParams process) {
13 | mConverter = converter;
14 | mSensor = sensor;
15 | mProcess = process;
16 | }
17 |
18 | protected GLPrograms getConverter() {
19 | return mConverter;
20 | }
21 |
22 | protected SensorParams getSensorParams() {
23 | return mSensor;
24 | }
25 |
26 | protected ProcessParams getProcessParams() {
27 | return mProcess;
28 | }
29 |
30 | protected boolean isEnabled() {
31 | return true;
32 | }
33 |
34 | protected abstract void execute(StagePipeline.StageMap previousStages);
35 |
36 | public abstract int getShader();
37 |
38 | @Override
39 | public void close() {
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/StagePipeline.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline;
2 |
3 | import android.graphics.Bitmap;
4 | import android.util.Log;
5 |
6 | import java.util.ArrayList;
7 | import java.util.List;
8 |
9 | import amirz.dngprocessor.colorspace.ColorspaceConverter;
10 | import amirz.dngprocessor.gl.GLCore;
11 | import amirz.dngprocessor.gl.GLPrograms;
12 | import amirz.dngprocessor.util.ShaderLoader;
13 | import amirz.dngprocessor.gl.TexturePool;
14 | import amirz.dngprocessor.params.ProcessParams;
15 | import amirz.dngprocessor.params.SensorParams;
16 | import amirz.dngprocessor.pipeline.convert.EdgeMirror;
17 | import amirz.dngprocessor.pipeline.convert.GreenDemosaic;
18 | import amirz.dngprocessor.pipeline.convert.PreProcess;
19 | import amirz.dngprocessor.pipeline.convert.ToIntermediate;
20 | import amirz.dngprocessor.pipeline.exposefuse.Laplace;
21 | import amirz.dngprocessor.pipeline.exposefuse.Merge;
22 | import amirz.dngprocessor.pipeline.exposefuse.DoubleExpose;
23 | import amirz.dngprocessor.pipeline.intermediate.BilateralFilter;
24 | import amirz.dngprocessor.pipeline.intermediate.Analysis;
25 | import amirz.dngprocessor.pipeline.intermediate.MergeDetail;
26 | import amirz.dngprocessor.pipeline.post.BlurLCE;
27 | import amirz.dngprocessor.pipeline.post.ToneMap;
28 |
29 | import static amirz.dngprocessor.util.Constants.BLOCK_HEIGHT;
30 |
31 | public class StagePipeline implements AutoCloseable {
32 | private static final String TAG = "StagePipeline";
33 |
34 | private final List mStages = new ArrayList<>();
35 |
36 | private final SensorParams mSensor;
37 | private final ProcessParams mProcess;
38 | private final GLPrograms mConverter;
39 | private final GLBlockProcessing mBlockProcessing;
40 |
41 | public StagePipeline(SensorParams sensor, ProcessParams process,
42 | byte[] raw, Bitmap argbOutput, ShaderLoader loader) {
43 | mSensor = sensor;
44 | mProcess = process;
45 |
46 | int outWidth = argbOutput.getWidth();
47 | int outHeight = argbOutput.getHeight();
48 |
49 | if (outWidth + sensor.outputOffsetX > sensor.inputWidth
50 | || outHeight + sensor.outputOffsetY > sensor.inputHeight) {
51 | throw new IllegalArgumentException("Raw image with dimensions (w=" + sensor.inputWidth
52 | + ", h=" + sensor.inputHeight
53 | + "), cannot converted into sRGB image with dimensions (w="
54 | + outWidth + ", h=" + outHeight + ").");
55 | }
56 | Log.d(TAG, "Output width,height: " + outWidth + "," + outHeight);
57 |
58 | GLCore.getInstance().setDimens(argbOutput.getWidth(), BLOCK_HEIGHT);
59 | mConverter = GLPrograms.getInstance(loader);
60 | mBlockProcessing = new GLBlockProcessing(argbOutput);
61 |
62 | ColorspaceConverter colorspace = new ColorspaceConverter(sensor);
63 |
64 | // RAW -> XYZ -> xyY
65 | addStage(new PreProcess(raw));
66 | addStage(new GreenDemosaic());
67 | addStage(new ToIntermediate(colorspace.sensorToXYZ_D50));
68 | addStage(new EdgeMirror());
69 |
70 | // Noise Reduce
71 | //addStage(new Decompose());
72 | //addStage(new NoiseMap());
73 | //addStage(new NoiseReduce(sensor, process));
74 |
75 | // Exposure Fusion: Compress Dynamic Range
76 | addStage(new DoubleExpose());
77 | addStage(new Laplace());
78 | addStage(new Merge());
79 |
80 | // Contrast Enhancement: Tone Mapping
81 | addStage(new Analysis(outWidth, outHeight,
82 | sensor.outputOffsetX, sensor.outputOffsetY));
83 | addStage(new BilateralFilter(process));
84 | addStage(new MergeDetail(process));
85 |
86 | // xyY -> XYZ -> sRGB
87 | addStage(new BlurLCE());
88 | addStage(new ToneMap(colorspace.XYZtoProPhoto,
89 | colorspace.proPhotoToSRGB));
90 | }
91 |
92 | private void addStage(Stage stage) {
93 | stage.init(mConverter, mSensor, mProcess);
94 | mStages.add(stage);
95 | }
96 |
97 | public void execute(OnProgressReporter reporter) {
98 | int stageCount = mStages.size();
99 | for (int i = 0; i < stageCount; i++) {
100 | Stage stage = mStages.get(i);
101 | reporter.onProgress(i, stageCount, stage.getClass().getSimpleName());
102 | if (stage.isEnabled()) {
103 | mConverter.useProgram(stage.getShader());
104 | stage.execute(new StageMap(mStages.subList(0, i)));
105 | }
106 | }
107 |
108 | // Assume that last stage set everything but did not render yet.
109 | mBlockProcessing.drawBlocksToOutput(mConverter);
110 |
111 | reporter.onProgress(stageCount, stageCount, "Done");
112 | }
113 |
114 | @Override
115 | public void close() {
116 | for (Stage stage : mStages) {
117 | stage.close();
118 | }
119 | mStages.clear();
120 | TexturePool.logLeaks();
121 |
122 | //mConverter.close();
123 | //GLCore.closeContext();
124 | }
125 |
126 | public interface OnProgressReporter {
127 | void onProgress(int completed, int total, String tag);
128 | }
129 |
130 | public static class StageMap {
131 | private final List mStages;
132 |
133 | private StageMap(List stages) {
134 | mStages = stages;
135 | }
136 |
137 | @SuppressWarnings("unchecked")
138 | public T getStage(Class cls) {
139 | for (Stage stage : mStages) {
140 | if (stage.getClass() == cls) {
141 | return (T) stage;
142 | }
143 | }
144 | return null;
145 | }
146 | }
147 | }
148 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/convert/EdgeMirror.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.convert;
2 |
3 | import amirz.dngprocessor.R;
4 | import amirz.dngprocessor.gl.GLPrograms;
5 | import amirz.dngprocessor.gl.Texture;
6 | import amirz.dngprocessor.pipeline.Stage;
7 | import amirz.dngprocessor.pipeline.StagePipeline;
8 |
9 | public class EdgeMirror extends Stage {
10 | private Texture mIntermediate;
11 |
12 | public Texture getIntermediate() {
13 | return mIntermediate;
14 | }
15 |
16 | @Override
17 | protected void execute(StagePipeline.StageMap previousStages) {
18 | GLPrograms converter = getConverter();
19 |
20 | ToIntermediate toIntermediate = previousStages.getStage(ToIntermediate.class);
21 | mIntermediate = toIntermediate.getIntermediate();
22 | int w = mIntermediate.getWidth();
23 | int h = mIntermediate.getHeight();
24 |
25 | converter.setTexture("intermediateBuffer", mIntermediate);
26 |
27 | int offsetX = getSensorParams().outputOffsetX;
28 | int offsetY = getSensorParams().outputOffsetY;
29 | converter.seti("minxy", offsetX, offsetY);
30 | converter.seti("maxxy", w - offsetX - 1, h - offsetY - 1);
31 |
32 | converter.drawBlocks(mIntermediate, false);
33 | }
34 |
35 | @Override
36 | public int getShader() {
37 | return R.raw.stage1_4_edge_mirror_fs;
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/convert/GreenDemosaic.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.convert;
2 |
3 | import amirz.dngprocessor.R;
4 | import amirz.dngprocessor.gl.GLPrograms;
5 | import amirz.dngprocessor.gl.Texture;
6 | import amirz.dngprocessor.pipeline.Stage;
7 | import amirz.dngprocessor.pipeline.StagePipeline;
8 |
9 | public class GreenDemosaic extends Stage {
10 | private Texture mSensorG;
11 |
12 | public Texture getSensorGTex() {
13 | return mSensorG;
14 | }
15 |
16 | @Override
17 | protected void execute(StagePipeline.StageMap previousStages) {
18 | GLPrograms converter = getConverter();
19 |
20 | PreProcess preProcess = previousStages.getStage(PreProcess.class);
21 |
22 | // Load old texture
23 | Texture sensorTex = previousStages.getStage(PreProcess.class).getSensorTex();
24 | converter.setTexture("rawBuffer", sensorTex);
25 | converter.seti("rawWidth", sensorTex.getWidth());
26 | converter.seti("rawHeight", sensorTex.getHeight());
27 |
28 | mSensorG = new Texture(sensorTex.getWidth(), sensorTex.getHeight(), 1,
29 | Texture.Format.Float16, null);
30 |
31 | converter.seti("cfaPattern", preProcess.getCfaPattern());
32 | converter.drawBlocks(mSensorG);
33 | }
34 |
35 | @Override
36 | public int getShader() {
37 | return R.raw.stage1_2_fs;
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/convert/PreProcess.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.convert;
2 |
3 | import java.nio.FloatBuffer;
4 | import java.nio.ShortBuffer;
5 |
6 | import amirz.dngprocessor.R;
7 | import amirz.dngprocessor.gl.GLPrograms;
8 | import amirz.dngprocessor.gl.Texture;
9 | import amirz.dngprocessor.gl.TexturePool;
10 | import amirz.dngprocessor.params.SensorParams;
11 | import amirz.dngprocessor.pipeline.Stage;
12 | import amirz.dngprocessor.pipeline.StagePipeline;
13 |
14 | import static android.opengl.GLES20.*;
15 |
/**
 * First pipeline stage: uploads the raw sensor data to the GPU and runs the
 * stage1_1 shader with the black/white levels, gain map, and hot pixel list
 * from the sensor parameters, producing a normalized Float16 texture.
 */
public class PreProcess extends Stage {
    // Raw sensor bytes; uploaded as a UInt16 texture in execute().
    private final byte[] mRaw;

    // Normalized sensor texture and the gain map, both read by later stages.
    private Texture mSensorTex, mGainMapTex;

    public PreProcess(byte[] raw) {
        mRaw = raw;
    }

    public Texture getSensorTex() {
        return mSensorTex;
    }

    public int getInWidth() {
        return getSensorParams().inputWidth;
    }

    public int getInHeight() {
        return getSensorParams().inputHeight;
    }

    public int getCfaPattern() {
        return getSensorParams().cfa;
    }

    public Texture getGainMapTex() {
        return mGainMapTex;
    }

    @Override
    protected void execute(StagePipeline.StageMap previousStages) {
        GLPrograms converter = getConverter();
        SensorParams sensor = getSensorParams();

        // First texture is just for normalization
        mSensorTex = TexturePool.get(getInWidth(), getInHeight(), 1,
                Texture.Format.Float16);

        // The integer upload texture only lives for the duration of this draw.
        try (Texture sensorUITex = TexturePool.get(getInWidth(), getInHeight(), 1,
                Texture.Format.UInt16)) {
            sensorUITex.setPixels(mRaw);

            converter.setTexture("rawBuffer", sensorUITex);
            converter.seti("rawWidth", getInWidth());
            converter.seti("rawHeight", getInHeight());
            converter.seti("cfaPattern", sensor.cfa);

            // Fall back to an identity 1x1 gain map when none was supplied.
            float[] gainMap = sensor.gainMap;
            int[] gainMapSize = sensor.gainMapSize;
            if (gainMap == null) {
                gainMap = new float[] { 1f, 1f, 1f, 1f };
                gainMapSize = new int[] { 1, 1 };
            }

            // Kept open (not pooled) so later stages can sample it; linear
            // filtering interpolates between the sparse gain map samples.
            mGainMapTex = new Texture(gainMapSize[0], gainMapSize[1], 4, Texture.Format.Float16,
                    FloatBuffer.wrap(gainMap), GL_LINEAR);
            converter.setTexture("gainMap", mGainMapTex);

            int[] blackLevel = sensor.blackLevelPattern;
            converter.setf("blackLevel", blackLevel[0], blackLevel[1], blackLevel[2], blackLevel[3]);
            converter.setf("whiteLevel", sensor.whiteLevel);
            converter.seti("cfaPattern", getCfaPattern());
            converter.seti("hotPixelsSize", sensor.hotPixelsSize);

            // NOTE(review): unlike gainMap above, sensor.hotPixels and
            // hotPixelsSize have no null fallback here — confirm callers
            // always provide them.
            int[] hotPixelsSize = sensor.hotPixelsSize;
            try (Texture hotPx = new Texture(hotPixelsSize[0], hotPixelsSize[1], 1, Texture.Format.UInt16,
                    ShortBuffer.wrap(sensor.hotPixels), GL_NEAREST, GL_REPEAT)) {
                converter.setTexture("hotPixels", hotPx);
                converter.drawBlocks(mSensorTex);
            }
        }
    }

    @Override
    public int getShader() {
        return R.raw.stage1_1_fs;
    }
}
94 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/convert/ToIntermediate.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.convert;
2 |
3 | import android.util.Rational;
4 |
5 | import amirz.dngprocessor.R;
6 | import amirz.dngprocessor.gl.GLPrograms;
7 | import amirz.dngprocessor.gl.Texture;
8 | import amirz.dngprocessor.gl.TexturePool;
9 | import amirz.dngprocessor.pipeline.Stage;
10 | import amirz.dngprocessor.pipeline.StagePipeline;
11 |
12 | public class ToIntermediate extends Stage {
13 | private final float[] mSensorToXYZ_D50;
14 |
15 | private Texture mIntermediate;
16 |
17 | public ToIntermediate(float[] sensorToXYZ_D50) {
18 | mSensorToXYZ_D50 = sensorToXYZ_D50;
19 | }
20 |
21 | public Texture getIntermediate() {
22 | return mIntermediate;
23 | }
24 |
25 | @Override
26 | protected void execute(StagePipeline.StageMap previousStages) {
27 | GLPrograms converter = getConverter();
28 |
29 | PreProcess preProcess = previousStages.getStage(PreProcess.class);
30 |
31 | converter.seti("rawWidth", preProcess.getInWidth());
32 | converter.seti("rawHeight", preProcess.getInHeight());
33 |
34 | // Second texture for per-CFA pixel data
35 | mIntermediate = TexturePool.get(preProcess.getInWidth(), preProcess.getInHeight(), 3,
36 | Texture.Format.Float16);
37 |
38 | // Load mosaic and green raw texture
39 | try (Texture sensorGTex = previousStages.getStage(GreenDemosaic.class).getSensorGTex()) {
40 | try (Texture sensorTex = preProcess.getSensorTex()) {
41 | converter.setTexture("rawBuffer", sensorTex);
42 | converter.setTexture("greenBuffer", sensorGTex);
43 |
44 | float[] neutralPoint = getSensorParams().neutralColorPoint;
45 | byte[] cfaVal = getSensorParams().cfaVal;
46 | converter.setf("neutralLevel",
47 | neutralPoint[cfaVal[0]],
48 | neutralPoint[cfaVal[1]],
49 | neutralPoint[cfaVal[2]],
50 | neutralPoint[cfaVal[3]]);
51 |
52 | converter.setf("neutralPoint", neutralPoint);
53 | converter.setf("sensorToXYZ", mSensorToXYZ_D50);
54 | converter.seti("cfaPattern", preProcess.getCfaPattern());
55 |
56 | try (Texture gainMapTex = preProcess.getGainMapTex()) {
57 | converter.setTexture("gainMap", gainMapTex);
58 | converter.drawBlocks(mIntermediate);
59 | }
60 | }
61 | }
62 | }
63 |
64 | @Override
65 | public int getShader() {
66 | return R.raw.stage1_3_fs;
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/exposefuse/DoubleExpose.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.exposefuse;
2 |
3 | import amirz.dngprocessor.R;
4 | import amirz.dngprocessor.gl.GLPrograms;
5 | import amirz.dngprocessor.gl.Texture;
6 | import amirz.dngprocessor.gl.TexturePool;
7 | import amirz.dngprocessor.pipeline.Stage;
8 | import amirz.dngprocessor.pipeline.StagePipeline;
9 | import amirz.dngprocessor.pipeline.convert.EdgeMirror;
10 |
11 | public class DoubleExpose extends Stage {
12 | private Texture mUnderexposed;
13 | private Texture mOverexposed;
14 |
15 | public Texture getUnderexposed() {
16 | return mUnderexposed;
17 | }
18 |
19 | public Texture getOverexposed() {
20 | return mOverexposed;
21 | }
22 |
23 | @Override
24 | protected void execute(StagePipeline.StageMap previousStages) {
25 | GLPrograms converter = getConverter();
26 |
27 | Texture normalExposure = previousStages.getStage(EdgeMirror.class).getIntermediate();
28 |
29 | mUnderexposed = TexturePool.get(normalExposure.getWidth(), normalExposure.getHeight(), 1,
30 | Texture.Format.Float16);
31 | converter.setTexture("buf", normalExposure);
32 | converter.setf("factor", 0.8f);
33 | converter.drawBlocks(mUnderexposed);
34 |
35 | mOverexposed = TexturePool.get(mUnderexposed);
36 | converter.setTexture("buf", normalExposure);
37 | converter.setf("factor", 1.8f);
38 | converter.drawBlocks(mOverexposed);
39 | }
40 |
41 | @Override
42 | public int getShader() {
43 | return R.raw.stage4_1_doubleexpose;
44 | }
45 |
46 | @Override
47 | protected boolean isEnabled() {
48 | return getProcessParams().exposeFuse;
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/exposefuse/FuseUtils.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.exposefuse;
2 |
3 | import amirz.dngprocessor.R;
4 | import amirz.dngprocessor.gl.GLPrograms;
5 | import amirz.dngprocessor.gl.Texture;
6 | import amirz.dngprocessor.gl.TexturePool;
7 |
8 | public class FuseUtils {
9 | public static Texture downsample2x(GLPrograms converter, Texture in) {
10 | Texture downsampled = TexturePool.get(in.getWidth() / 2 + 1,
11 | in.getHeight() / 2 + 1,
12 | in.getChannels(),
13 | in.getFormat());
14 |
15 | try (Texture tmp2 = TexturePool.get(in)) {
16 | try (Texture tmp = TexturePool.get(in)) {
17 | blur2x(converter, in, tmp, tmp2);
18 | }
19 |
20 | converter.useProgram(R.raw.stage4_2_downsample);
21 | converter.setTexture("buf", tmp2);
22 | converter.seti("maxxy", tmp2.getWidth() - 1, tmp2.getHeight() - 1);
23 | converter.drawBlocks(downsampled, false);
24 | }
25 |
26 | return downsampled;
27 | }
28 |
29 | public static Texture upsample2x(GLPrograms converter, Texture in, Texture dimens) {
30 | Texture upsampled = TexturePool.get(dimens);
31 |
32 | converter.useProgram(R.raw.stage4_3_upsample);
33 | converter.setTexture("buf", in);
34 | converter.drawBlocks(upsampled);
35 |
36 | try (Texture tmp = TexturePool.get(upsampled)) {
37 | blur2x(converter, upsampled, tmp, upsampled);
38 | }
39 |
40 | return upsampled;
41 | }
42 |
43 | public static void blur2x(GLPrograms converter, Texture in, Texture tmp, Texture out) {
44 | converter.useProgram(R.raw.stage4_0_blur_1ch_fs);
45 | converter.seti("bufSize", in.getWidth(), in.getHeight());
46 | converter.setf("sigma", 1.36f);
47 | converter.seti("radius", 2);
48 |
49 | converter.setTexture("buf", in);
50 | converter.seti("dir", 1, 0); // Horizontal
51 | converter.drawBlocks(tmp, false);
52 |
53 | converter.setTexture("buf", tmp);
54 | converter.seti("dir", 0, 1); // Vertical
55 | converter.drawBlocks(out, false);
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/exposefuse/Laplace.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.exposefuse;
2 |
3 | import amirz.dngprocessor.R;
4 | import amirz.dngprocessor.gl.GLPrograms;
5 | import amirz.dngprocessor.gl.Texture;
6 | import amirz.dngprocessor.gl.TexturePool;
7 | import amirz.dngprocessor.pipeline.Stage;
8 | import amirz.dngprocessor.pipeline.StagePipeline;
9 |
10 | import static amirz.dngprocessor.pipeline.exposefuse.FuseUtils.*;
11 |
/**
 * Builds gaussian and Laplacian pyramids for both the under- and
 * overexposed images produced by {@link DoubleExpose}.
 */
public class Laplace extends Stage {
    // Number of gaussian levels per pyramid (so LEVELS - 1 Laplacian levels).
    private static final int LEVELS = 10;

    /** Gaussian levels plus the difference (detail) textures between them. */
    public static class Pyramid {
        public Texture[] gauss;
        public Texture[] laplace;
    }

    private Pyramid mUnderPyramid;
    private Pyramid mOverPyramid;

    public Pyramid getUnderPyramid() {
        return mUnderPyramid;
    }

    public Pyramid getOverPyramid() {
        return mOverPyramid;
    }

    /**
     * Closes every texture in both pyramids and drops the references.
     * Note that gauss[0] is the texture produced by DoubleExpose, so that
     * stage's output is released here as well.
     * NOTE(review): assumes execute() ran first — both pyramids must be
     * non-null or this throws.
     */
    public void releasePyramid() {
        for (Pyramid pyr : new Pyramid[] { mUnderPyramid, mOverPyramid }) {
            for (Texture tex : pyr.gauss) {
                tex.close();
            }
            for (Texture tex : pyr.laplace) {
                tex.close();
            }
        }
        mUnderPyramid = null;
        mOverPyramid = null;
    }

    @Override
    protected void execute(StagePipeline.StageMap previousStages) {
        DoubleExpose de = previousStages.getStage(DoubleExpose.class);
        mUnderPyramid = createPyramid(de.getUnderexposed());
        mOverPyramid = createPyramid(de.getOverexposed());
    }

    /**
     * Builds one pyramid: repeatedly downsamples the input, upsamples each
     * coarser level back to the finer one's size, and stores the difference
     * (the high frequencies lost by downsampling) as the Laplacian level.
     */
    private Pyramid createPyramid(Texture remainder) {
        GLPrograms converter = getConverter();

        // Downsample loop.
        Texture[] downsampled = new Texture[LEVELS];
        downsampled[0] = remainder;
        for (int i = 1; i < downsampled.length; i++) {
            downsampled[i] = downsample2x(converter, downsampled[i - 1]);
        }

        // Upsample loop.
        Texture[] upsampled = new Texture[downsampled.length - 1];
        for (int i = 0; i < upsampled.length; i++) {
            upsampled[i] = upsample2x(converter, downsampled[i + 1], downsampled[i]);
        }

        // Diff loop. Indices for resolution are the same between downsampled and upsampled,
        // but the upsampled ones lack high frequency information.
        Texture[] diff = new Texture[upsampled.length];
        converter.useProgram(R.raw.stage4_4_difference);
        for (int i = 0; i < diff.length; i++) {
            converter.setTexture("base", upsampled[i]);
            converter.setTexture("target", downsampled[i]);

            diff[i] = TexturePool.get(upsampled[i]);
            converter.drawBlocks(diff[i]);
            // The upsampled intermediate is no longer needed once diffed.
            upsampled[i].close();
        }

        Pyramid pyramid = new Pyramid();
        pyramid.gauss = downsampled;
        pyramid.laplace = diff;
        return pyramid;
    }

    @Override
    public int getShader() {
        return R.raw.stage4_2_downsample;
    }

    @Override
    protected boolean isEnabled() {
        return getProcessParams().exposeFuse;
    }
}
96 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/exposefuse/Merge.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.exposefuse;
2 |
3 | import amirz.dngprocessor.R;
4 | import amirz.dngprocessor.gl.GLPrograms;
5 | import amirz.dngprocessor.gl.Texture;
6 | import amirz.dngprocessor.gl.TexturePool;
7 | import amirz.dngprocessor.pipeline.Stage;
8 | import amirz.dngprocessor.pipeline.StagePipeline;
9 | import amirz.dngprocessor.pipeline.convert.EdgeMirror;
10 |
11 | import static amirz.dngprocessor.pipeline.exposefuse.FuseUtils.upsample2x;
12 |
/**
 * Collapses the under- and overexposed pyramids from {@link Laplace} into a
 * single merged luma texture, coarsest level first, then recombines it with
 * the chroma of the original intermediate image.
 */
public class Merge extends Stage {
    // Final merged texture; aliases the EdgeMirror intermediate (see execute).
    private Texture mMerged;

    public Texture getMerged() {
        return mMerged;
    }

    @Override
    protected void execute(StagePipeline.StageMap previousStages) {
        GLPrograms converter = getConverter();

        Laplace laplace = previousStages.getStage(Laplace.class);
        Laplace.Pyramid underExpo = laplace.getUnderPyramid();
        Laplace.Pyramid overExpo = laplace.getOverPyramid();

        // Seed the merge from the coarsest gaussian level of both pyramids;
        // there is no upscaled previous result yet (useUpscaled = 0).
        converter.useProgram(getShader());
        converter.seti("useUpscaled", 0);
        converter.setTexture("gaussUnder", underExpo.gauss[underExpo.gauss.length - 1]);
        converter.setTexture("gaussOver", overExpo.gauss[overExpo.gauss.length - 1]);
        converter.setTexture("blendUnder", underExpo.gauss[underExpo.gauss.length - 1]);
        converter.setTexture("blendOver", overExpo.gauss[overExpo.gauss.length - 1]);
        converter.seti("level", overExpo.gauss.length - 1);

        Texture wip = TexturePool.get(underExpo.gauss[underExpo.gauss.length - 1]);
        converter.drawBlocks(wip, false);

        // Start with the lowest level gaussian.
        for (int i = underExpo.laplace.length - 1; i >= 0; i--) {
            // Upscale the previous merge to this level's resolution, then
            // blend in the Laplacian detail of both exposures.
            try (Texture upscaleWip = upsample2x(converter, wip, underExpo.gauss[i])) {
                converter.useProgram(getShader());

                // We can discard the previous work in progress merge.
                wip.close();
                wip = TexturePool.get(underExpo.laplace[i]);

                converter.seti("useUpscaled", 1);
                converter.setTexture("upscaled", upscaleWip);
                converter.setTexture("gaussUnder", underExpo.gauss[i]);
                converter.setTexture("gaussOver", overExpo.gauss[i]);
                converter.setTexture("blendUnder", underExpo.laplace[i]);
                converter.setTexture("blendOver", overExpo.laplace[i]);
                converter.seti("level", i);

                converter.drawBlocks(wip, false);

                /*
                if (i < 4) {
                    // Reuse laplace for NR, and swap with non-NR texture, which will be closed
                    // by call to releasePyramid below.
                    Texture tmp = underExpo.laplace[i];
                    noiseReduce(wip, tmp, i);
                    underExpo.laplace[i] = wip;
                    wip = tmp;
                }
                */
            }
        }

        // All pyramid textures (including the DoubleExpose outputs) die here.
        laplace.releasePyramid();
        Texture chroma = previousStages.getStage(EdgeMirror.class).getIntermediate();
        mMerged = chroma; // Reuse.

        // Combine the merged luma with the original chroma, in place.
        converter.useProgram(R.raw.stage4_9_combine_z);
        converter.setTexture("bufChroma", chroma);
        converter.setTexture("bufLuma", wip);
        converter.drawBlocks(mMerged);

        wip.close();
    }

    // NOTE(review): only referenced from the commented-out block in execute();
    // kept around for when pyramid noise reduction is re-enabled.
    private void noiseReduce(Texture in, Texture out, int level) {
        GLPrograms converter = getConverter();
        converter.useProgram(level > 0
                ? R.raw.stage4_7_nr_intermediate
                : R.raw.stage4_8_nr_zero);
        converter.setTexture("buf", in);
        converter.seti("bufEdge", in.getWidth() - 1, in.getHeight() - 1);
        converter.setf("blendY", 0.9f);
        if (level > 0) {
            converter.setf("sigma", 0.4f, 0.03f);
        }
        converter.drawBlocks(out, level == 0);
    }

    @Override
    public int getShader() {
        return R.raw.stage4_5_merge;
    }

    @Override
    protected boolean isEnabled() {
        return getProcessParams().exposeFuse;
    }
}
107 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/intermediate/Analysis.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.intermediate;
2 |
3 | import android.util.Log;
4 |
5 | import java.nio.ByteBuffer;
6 | import java.nio.ByteOrder;
7 | import java.nio.FloatBuffer;
8 | import java.util.Arrays;
9 |
10 | import amirz.dngprocessor.R;
11 | import amirz.dngprocessor.gl.GLPrograms;
12 | import amirz.dngprocessor.gl.Texture;
13 | import amirz.dngprocessor.gl.TexturePool;
14 | import amirz.dngprocessor.math.Histogram;
15 | import amirz.dngprocessor.pipeline.Stage;
16 | import amirz.dngprocessor.pipeline.StagePipeline;
17 | import amirz.dngprocessor.pipeline.convert.EdgeMirror;
18 | import amirz.dngprocessor.pipeline.convert.ToIntermediate;
19 | import amirz.dngprocessor.pipeline.exposefuse.Merge;
20 | import amirz.dngprocessor.pipeline.noisereduce.NoiseReduce;
21 |
22 | import static android.opengl.GLES20.*;
23 |
24 | public class Analysis extends Stage {
25 | private static final String TAG = "SampleHistogram";
26 |
27 | private final int mOutWidth, mOutHeight, mOffsetX, mOffsetY;
28 | private float[] mSigma, mHist;
29 | private float mGamma;
30 |
31 | public Analysis(int outWidth, int outHeight, int offsetX, int offsetY) {
32 | mOutWidth = outWidth;
33 | mOutHeight = outHeight;
34 | mOffsetX = offsetX;
35 | mOffsetY = offsetY;
36 | }
37 |
38 | public float[] getSigma() {
39 | return mSigma;
40 | }
41 |
42 | public float[] getHist() {
43 | return mHist;
44 | }
45 |
46 | public float getGamma() {
47 | return mGamma;
48 | }
49 |
50 | @Override
51 | protected void execute(StagePipeline.StageMap previousStages) {
52 | GLPrograms converter = getConverter();
53 |
54 | Texture intermediate = previousStages.getStage(Merge.class).getMerged();
55 | if (intermediate == null) {
56 | intermediate = previousStages.getStage(EdgeMirror.class).getIntermediate();
57 | }
58 | converter.useProgram(R.raw.stage2_2_analysis_fs);
59 |
60 | converter.setTexture("intermediate", intermediate);
61 | converter.seti("outOffset", mOffsetX, mOffsetY);
62 |
63 | int w = mOutWidth;
64 | int h = mOutHeight;
65 | int samplingFactor = 16;
66 |
67 | // Analyze
68 | w /= samplingFactor;
69 | h /= samplingFactor;
70 |
71 | converter.seti("samplingFactor", samplingFactor);
72 |
73 | try (Texture analyzeTex = TexturePool.get(w, h, 4, Texture.Format.Float16)) {
74 | converter.drawBlocks(analyzeTex);
75 |
76 | int whPixels = w * h;
77 | float[] f = new float[whPixels * 4];
78 | FloatBuffer fb = ByteBuffer.allocateDirect(f.length * 4)
79 | .order(ByteOrder.nativeOrder())
80 | .asFloatBuffer();
81 | fb.mark();
82 |
83 | glReadPixels(0, 0, w, h, GL_RGBA, GL_FLOAT, fb.reset());
84 | fb.get(f);
85 |
86 | // Calculate a histogram on the result
87 | Histogram histParser = new Histogram(f, whPixels);
88 | mSigma = histParser.sigma;
89 | mHist = histParser.hist;
90 | mGamma = histParser.gamma;
91 |
92 | Log.d(TAG, "Sigma " + Arrays.toString(mSigma));
93 | Log.d(TAG, "LogAvg " + histParser.logAvgLuminance);
94 | Log.d(TAG, "Gamma " + histParser.gamma);
95 | }
96 | }
97 |
98 | @Override
99 | public int getShader() {
100 | return R.raw.stage2_1_noise_level_fs;
101 | }
102 | }
103 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/intermediate/BilateralFilter.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.intermediate;
2 |
3 | import amirz.dngprocessor.R;
4 | import amirz.dngprocessor.gl.GLPrograms;
5 | import amirz.dngprocessor.gl.Texture;
6 | import amirz.dngprocessor.gl.TexturePool;
7 | import amirz.dngprocessor.params.ProcessParams;
8 | import amirz.dngprocessor.pipeline.Stage;
9 | import amirz.dngprocessor.pipeline.StagePipeline;
10 | import amirz.dngprocessor.pipeline.convert.ToIntermediate;
11 | import amirz.dngprocessor.pipeline.exposefuse.Merge;
12 | import amirz.dngprocessor.pipeline.noisereduce.NoiseReduce;
13 |
/**
 * Three-step bilateral filter over the merged intermediate, preceded by a
 * median pre-filter. NOTE(review): isEnabled() returns false, so this stage
 * is currently disabled and getBilateral() stays null for consumers.
 */
public class BilateralFilter extends Stage {
    private final ProcessParams mProcess;
    private Texture mBilateral;

    public BilateralFilter(ProcessParams process) {
        mProcess = process;
    }

    /** Filtered result, or null if the stage did not run. */
    public Texture getBilateral() {
        return mBilateral;
    }

    @Override
    protected void execute(StagePipeline.StageMap previousStages) {
        // Nothing to equalize against when histogram correction is off.
        if (mProcess.histFactor == 0f) {
            return;
        }

        GLPrograms converter = getConverter();

        // NOTE(review): unlike Analysis/MergeDetail there is no null fallback
        // for getMerged() here; would NPE if fusion is disabled. Moot while
        // this stage itself is disabled — confirm before re-enabling.
        Texture intermediate = previousStages.getStage(Merge.class).getMerged();
        int w = intermediate.getWidth();
        int h = intermediate.getHeight();

        // mBilateral is not released here; presumably a downstream consumer
        // takes ownership — TODO confirm before re-enabling the stage.
        mBilateral = TexturePool.get(w, h, 3, Texture.Format.Float16);
        try (Texture bilateralTmp = TexturePool.get(w, h, 3, Texture.Format.Float16)) {
            // Pre-bilateral median filter.
            converter.setTexture("buf", intermediate);
            converter.drawBlocks(bilateralTmp, false);

            // 3-step bilateral filter setup.
            converter.useProgram(R.raw.stage2_3_bilateral);
            converter.seti("bufSize", w, h);

            // 1) Small area, strong blur.
            converter.setTexture("buf", bilateralTmp);
            converter.setf("sigma", 0.03f, 0.5f);
            converter.seti("radius", 3, 1);
            converter.drawBlocks(mBilateral, false);

            // 2) Medium area, medium blur.
            converter.setTexture("buf", mBilateral);
            converter.setf("sigma", 0.02f, 3f);
            converter.seti("radius", 6, 2);
            converter.drawBlocks(bilateralTmp, false);

            // 3) Large area, weak blur.
            converter.setTexture("buf", bilateralTmp);
            converter.setf("sigma", 0.01f, 9f);
            converter.seti("radius", 9, 3);
            converter.drawBlocks(mBilateral);
        }
    }

    @Override
    public int getShader() {
        return R.raw.stage2_3_median;
    }

    @Override
    protected boolean isEnabled() {
        // Stage disabled; see class comment.
        return false;
    }
}
78 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/intermediate/MergeDetail.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.intermediate;
2 |
3 | import android.util.Log;
4 |
5 | import java.nio.FloatBuffer;
6 |
7 | import amirz.dngprocessor.R;
8 | import amirz.dngprocessor.gl.GLPrograms;
9 | import amirz.dngprocessor.gl.Texture;
10 | import amirz.dngprocessor.params.ProcessParams;
11 | import amirz.dngprocessor.pipeline.Stage;
12 | import amirz.dngprocessor.pipeline.StagePipeline;
13 | import amirz.dngprocessor.pipeline.convert.EdgeMirror;
14 | import amirz.dngprocessor.pipeline.exposefuse.Merge;
15 |
16 | import static android.opengl.GLES20.*;
17 |
18 | public class MergeDetail extends Stage {
19 | private static final String TAG = "MergeDetail";
20 |
21 | private static final float MIN_GAMMA = 0.55f;
22 |
23 | private final float mHistFactor;
24 | private Texture mIntermediate;
25 |
26 | public MergeDetail(ProcessParams processParams) {
27 | mHistFactor = processParams.histFactor;
28 | }
29 |
30 | public Texture getIntermediate() {
31 | return mIntermediate;
32 | }
33 |
34 | @Override
35 | protected void execute(StagePipeline.StageMap previousStages) {
36 | GLPrograms converter = getConverter();
37 |
38 | //BilateralFilter bilateral = previousStages.getStage(BilateralFilter.class);
39 | //Texture bilateralTex = bilateral.getBilateral();
40 |
41 | mIntermediate = previousStages.getStage(Merge.class).getMerged();
42 | if (mIntermediate == null) {
43 | mIntermediate = previousStages.getStage(EdgeMirror.class).getIntermediate();
44 | }
45 |
46 | // If there is no bilateral filtered texture, skip this step.
47 | //if (bilateralTex == null) {
48 | //return;
49 | //}
50 |
51 | Analysis sampleHistogram = previousStages.getStage(Analysis.class);
52 | float[] hist = sampleHistogram.getHist();
53 |
54 | Texture histTex = new Texture(hist.length, 1, 1, Texture.Format.Float16,
55 | FloatBuffer.wrap(hist), GL_LINEAR, GL_CLAMP_TO_EDGE);
56 | converter.setTexture("hist", histTex);
57 | converter.setf("histOffset", 0.5f / hist.length, 1.f - 1.f / hist.length);
58 |
59 | // If there are many dark patches, the color noise goes up.
60 | // To ensure that we do not boost that too much, reduce with color noise.
61 | float[] sigma = sampleHistogram.getSigma();
62 | float minGamma = Math.min(1f, MIN_GAMMA + 3f * (float) Math.hypot(sigma[0], sigma[1]));
63 | float gamma = sampleHistogram.getGamma();
64 | gamma = Math.max(minGamma, gamma < 1.f ? 0.55f + 0.45f * gamma : gamma);
65 | gamma = (float) Math.pow(gamma, mHistFactor);
66 | Log.d(TAG, "Setting gamma of " + gamma + " (original " + sampleHistogram.getGamma() + ")");
67 | converter.setf("gamma", gamma);
68 |
69 | // Reduce the histogram equalization in scenes with good light distribution.
70 | float bilatHistEq = Math.max(0.4f, 1f - sampleHistogram.getGamma() * 0.6f
71 | - 4f * (float) Math.hypot(sigma[0], sigma[1]));
72 | Log.d(TAG, "Smoothed histogram equalization " + bilatHistEq);
73 | converter.setf("histFactor", bilatHistEq * mHistFactor);
74 |
75 | converter.setTexture("intermediate", mIntermediate);
76 | //converter.setTexture("bilateral", bilateralTex);
77 |
78 | converter.drawBlocks(mIntermediate);
79 |
80 | //bilateralTex.close();
81 | }
82 |
83 | @Override
84 | public int getShader() {
85 | return R.raw.stage2_4_merge_detail;
86 | }
87 |
88 | @Override
89 | public void close() {
90 | mIntermediate.close();
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/noisereduce/Decompose.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.noisereduce;
2 |
3 | import amirz.dngprocessor.R;
4 | import amirz.dngprocessor.gl.GLPrograms;
5 | import amirz.dngprocessor.gl.Texture;
6 | import amirz.dngprocessor.pipeline.Stage;
7 | import amirz.dngprocessor.pipeline.StagePipeline;
8 | import amirz.dngprocessor.pipeline.convert.EdgeMirror;
9 |
10 | public class Decompose extends Stage {
11 | private Texture mHighRes, mMediumRes, mLowRes;
12 |
13 | public Texture[] getLayers() {
14 | return new Texture[] {
15 | mHighRes, mMediumRes, mLowRes
16 | };
17 | }
18 |
19 | @Override
20 | protected void execute(StagePipeline.StageMap previousStages) {
21 | GLPrograms converter = getConverter();
22 |
23 | mHighRes = previousStages.getStage(EdgeMirror.class).getIntermediate();
24 | int w = mHighRes.getWidth();
25 | int h = mHighRes.getHeight();
26 |
27 | converter.seti("bufSize", w, h);
28 |
29 | try (Texture tmp = new Texture(w, h, 3,
30 | Texture.Format.Float16, null)) {
31 | converter.setf("sigma", 3f);
32 | converter.seti("radius", 6);
33 |
34 | // First render to the tmp buffer.
35 | converter.setTexture("buf", mHighRes);
36 | converter.seti("dir", 0, 1); // Vertical
37 | converter.drawBlocks(tmp, false);
38 |
39 | // Now render from tmp to the real buffer.
40 | converter.setTexture("buf", tmp);
41 | converter.seti("dir", 1, 0); // Horizontal
42 |
43 | mMediumRes = new Texture(w, h, 3,
44 | Texture.Format.Float16, null);
45 | converter.drawBlocks(mMediumRes);
46 |
47 | converter.setf("sigma", 9f);
48 | converter.seti("radius", 18);
49 |
50 | // First render to the tmp buffer.
51 | converter.setTexture("buf", mMediumRes);
52 | converter.seti("dir", 0, 1); // Vertical
53 | converter.drawBlocks(tmp, false);
54 |
55 | // Now render from tmp to the real buffer.
56 | converter.setTexture("buf", tmp);
57 | converter.seti("dir", 1, 0); // Horizontal
58 |
59 | mLowRes = new Texture(w, h, 3,
60 | Texture.Format.Float16, null);
61 | converter.drawBlocks(mLowRes);
62 | }
63 | }
64 |
65 | @Override
66 | public int getShader() {
67 | return R.raw.stage2_0_blur_3ch_fs;
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/noisereduce/NoiseMap.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.noisereduce;
2 |
3 | import amirz.dngprocessor.R;
4 | import amirz.dngprocessor.gl.GLPrograms;
5 | import amirz.dngprocessor.gl.Texture;
6 | import amirz.dngprocessor.pipeline.Stage;
7 | import amirz.dngprocessor.pipeline.StagePipeline;
8 |
/**
 * Builds a per-layer noise-level map for each resolution layer produced by
 * {@link Decompose}, then smooths each map with a separable gaussian blur.
 */
public class NoiseMap extends Stage {
    private Texture[] mNoiseTex;

    /** One quarter-resolution noise map per Decompose layer; set by execute(). */
    public Texture[] getNoiseTex() {
        return mNoiseTex;
    }

    @Override
    protected void execute(StagePipeline.StageMap previousStages) {
        GLPrograms converter = getConverter();
        Texture[] layers = previousStages.getStage(Decompose.class).getLayers();

        // Pass 1: measure the noise level of each layer at 1/4 resolution.
        mNoiseTex = new Texture[layers.length];
        for (int i = 0; i < layers.length; i++) {
            converter.setTexture("intermediate", layers[i]);
            converter.seti("bufSize", layers[i].getWidth(), layers[i].getHeight());
            // Sampling radius grows 4x per layer: 1, 4, 16.
            converter.seti("radius", 1 << (i * 2));
            mNoiseTex[i] = new Texture(layers[i].getWidth() / 4 + 1,
                    layers[i].getHeight() / 4 + 1, 3,
                    Texture.Format.Float16, null);
            converter.drawBlocks(mNoiseTex[i]);
        }

        // Pass 2: smooth each noise map with a two-pass (separable) blur.
        converter.useProgram(R.raw.stage2_1_noise_level_blur_fs);

        for (int i = 0; i < layers.length; i++) {
            Texture layer = mNoiseTex[i];
            converter.seti("minxy", 0, 0);
            converter.seti("maxxy", layer.getWidth() - 1, layer.getHeight() - 1);
            try (Texture tmp = new Texture(layer.getWidth(), layer.getHeight(), 3,
                    Texture.Format.Float16, null)) {

                // First render to the tmp buffer.
                converter.setTexture("buf", mNoiseTex[i]);
                // Blur strength scales with the layer's sampling radius.
                converter.setf("sigma", 1.5f * (1 << i));
                converter.seti("radius", 3 << i, 1);
                converter.seti("dir", 0, 1); // Vertical
                converter.drawBlocks(tmp, false);

                // Now render from tmp to the real buffer.
                converter.setTexture("buf", tmp);
                converter.seti("dir", 1, 0); // Horizontal
                converter.drawBlocks(mNoiseTex[i]);
            }
        }
    }

    @Override
    public int getShader() {
        return R.raw.stage2_1_noise_level_fs;
    }
}
61 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/noisereduce/NoiseReduce.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.noisereduce;
2 |
3 | import android.util.Log;
4 |
5 | import amirz.dngprocessor.R;
6 | import amirz.dngprocessor.gl.GLPrograms;
7 | import amirz.dngprocessor.gl.Texture;
8 | import amirz.dngprocessor.params.ProcessParams;
9 | import amirz.dngprocessor.params.SensorParams;
10 | import amirz.dngprocessor.pipeline.Stage;
11 | import amirz.dngprocessor.pipeline.StagePipeline;
12 | import amirz.dngprocessor.pipeline.convert.EdgeMirror;
13 |
/**
 * Gaussian-pyramid noise reduction over the three resolution layers from
 * {@link Decompose}, guided by the per-layer noise maps from {@link NoiseMap}.
 */
public class NoiseReduce extends Stage {
    private static final String TAG = "NoiseReduce";

    // NOTE(review): mSensorParams is not read anywhere in this class; it
    // looks like a leftover — confirm before removing.
    private final SensorParams mSensorParams;
    private final ProcessParams mProcessParams;
    // Result texture; aliases layers[0] of the Decompose stage.
    private Texture mDenoised;

    public NoiseReduce(SensorParams sensor, ProcessParams process) {
        mSensorParams = sensor;
        mProcessParams = process;
    }

    /** Denoised output; reuses the high-res layer owned by Decompose. */
    public Texture getDenoised() {
        return mDenoised;
    }

    @Override
    protected void execute(StagePipeline.StageMap previousStages) {
        GLPrograms converter = getConverter();

        Texture[] layers = previousStages.getStage(Decompose.class).getLayers();
        mDenoised = layers[0];

        // Denoising disabled: keep the high-res layer untouched and release
        // the lower-resolution layers immediately.
        if (mProcessParams.denoiseFactor == 0) {
            for (int i = 1; i < layers.length; i++) {
                layers[i].close();
            }
            return;
        }

        Texture[] denoised = new Texture[layers.length];
        Texture[] noise = previousStages.getStage(NoiseMap.class).getNoiseTex();

        // Gaussian pyramid denoising.
        for (int i = 0; i < layers.length; i++) {
            converter.useProgram(R.raw.stage3_1_noise_reduce_fs);

            converter.seti("bufSize", layers[i].getWidth(), layers[i].getHeight());
            converter.setTexture("noiseTex", noise[i]);
            // Coarser layers use a larger radius and sparser sampling.
            converter.seti("radius", 3 << (i * 2), 1 << (i * 2)); // Radius, Sampling
            converter.setf("sigma", 2f * (1 << (i * 2)), 3f / (i + 1)); // Spatial, Color
            converter.setf("blendY", 3f / (2f + layers.length - i));

            denoised[i] = new Texture(layers[i]);

            converter.setTexture("inBuffer", layers[i]);
            converter.drawBlocks(denoised[i]);
        }

        // Merge all layers.
        converter.useProgram(R.raw.stage3_1_noise_reduce_remove_noise_fs);

        converter.setTexture("bufDenoisedHighRes", denoised[0]);
        converter.setTexture("bufDenoisedMediumRes", denoised[1]);
        converter.setTexture("bufDenoisedLowRes", denoised[2]);
        converter.setTexture("bufNoisyMediumRes", layers[1]);
        converter.setTexture("bufNoisyLowRes", layers[2]);
        converter.setTexture("noiseTexMediumRes", noise[1]);
        converter.setTexture("noiseTexLowRes", noise[2]);

        // Reuse original high res noisy texture.
        converter.drawBlocks(mDenoised);

        // Cleanup.
        denoised[0].close();
        for (int i = 1; i < layers.length; i++) {
            layers[i].close();
            denoised[i].close();
        }
    }

    @Override
    public int getShader() {
        return R.raw.stage3_1_noise_reduce_fs;
    }

    /**
     * Noise Reduction Parameters.
     * NOTE(review): only referenced from commented-out code in ToneMap within
     * this view — confirm whether it is still needed.
     */
    static class NRParams {
        final float[] sigma;
        final int denoiseFactor;
        final float sharpenFactor;
        final float adaptiveSaturation, adaptiveSaturationPow;

        private NRParams(ProcessParams params, float[] s) {
            sigma = s;

            float hypot = (float) Math.hypot(s[0], s[1]);
            Log.d(TAG, "Chroma noise hypot " + hypot);

            // Scale the user-set factor by the measured noise level.
            denoiseFactor = (int)((float) params.denoiseFactor * Math.sqrt(s[0] + s[1]));
            Log.d(TAG, "Denoise radius " + denoiseFactor);

            // Sharpen less (down to a slight softening) on noisy images.
            sharpenFactor = Math.max(params.sharpenFactor - 6f * hypot, -0.25f);
            Log.d(TAG, "Sharpen factor " + sharpenFactor);

            adaptiveSaturation = Math.max(0f, params.adaptiveSaturation[0] - 30f * hypot);
            adaptiveSaturationPow = params.adaptiveSaturation[1];
            Log.d(TAG, "Adaptive saturation " + adaptiveSaturation);
        }
    }
}
117 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/post/BlurLCE.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.post;
2 |
3 | import amirz.dngprocessor.R;
4 | import amirz.dngprocessor.gl.GLPrograms;
5 | import amirz.dngprocessor.gl.Texture;
6 | import amirz.dngprocessor.gl.TexturePool;
7 | import amirz.dngprocessor.pipeline.Stage;
8 | import amirz.dngprocessor.pipeline.StagePipeline;
9 | import amirz.dngprocessor.pipeline.intermediate.MergeDetail;
10 |
/**
 * Produces three separable gaussian blurs (weak/medium/strong) of the merged
 * intermediate; ToneMap binds them when local contrast enhancement is on.
 */
public class BlurLCE extends Stage {
    private Texture mWeakBlur, mMediumBlur, mStrongBlur;

    /** Weakest blur (sigma 0.67); null unless the LCE option is enabled. */
    public Texture getWeakBlur() {
        return mWeakBlur;
    }

    /** Medium blur (sigma 1.5); null unless the LCE option is enabled. */
    public Texture getMediumBlur() {
        return mMediumBlur;
    }

    /** Strongest blur (sigma 2.5); null unless the LCE option is enabled. */
    public Texture getStrongBlur() {
        return mStrongBlur;
    }

    @Override
    protected void execute(StagePipeline.StageMap previousStages) {
        if (!getProcessParams().lce) {
            return;
        }

        Texture intermediate = previousStages.getStage(MergeDetail.class).getIntermediate();
        GLPrograms converter = getConverter();

        int w = intermediate.getWidth();
        int h = intermediate.getHeight();

        // One shared scratch texture holds each vertical pass result.
        try (Texture tmp = TexturePool.get(w, h, 1, Texture.Format.Float16)) {
            // Restrict blurring to the sensor's active output area.
            int offsetX = getSensorParams().outputOffsetX;
            int offsetY = getSensorParams().outputOffsetY;
            converter.seti("minxy", offsetX, offsetY);
            converter.seti("maxxy", w - offsetX - 1, h - offsetY - 1);

            {
                // First render to the tmp buffer.
                converter.setTexture("buf", intermediate);
                converter.setf("sigma", 2.5f);
                converter.seti("radius", 9, 1);
                //converter.setf("sigma", 32f);
                //converter.seti("radius", 84, 4);
                converter.seti("dir", 0, 1); // Vertical
                converter.setf("ch", 0, 1); // xy[Y]
                converter.drawBlocks(tmp, false);

                // Now render from tmp to the real buffer.
                converter.setTexture("buf", tmp);
                converter.seti("dir", 1, 0); // Horizontal
                converter.setf("ch", 1, 0); // [Y]00

                mStrongBlur = TexturePool.get(w, h, 1, Texture.Format.Float16);
                converter.drawBlocks(mStrongBlur);
            }

            {
                // First render to the tmp buffer.
                converter.setTexture("buf", intermediate);
                // NOTE(review): this overwrites the "buf" sampler uniform with
                // unit 0 immediately after setTexture bound it, and the other
                // two passes do not do this — looks like a leftover; confirm.
                converter.seti("buf", 0);
                //converter.setf("sigma", 2f);
                //converter.seti("radius", 5, 1);
                converter.setf("sigma", 1.5f);
                converter.seti("radius", 6, 1);
                converter.seti("dir", 0, 1); // Vertical
                converter.setf("ch", 0, 1); // xy[Y]
                converter.drawBlocks(tmp, false);

                // Now render from tmp to the real buffer.
                converter.setTexture("buf", tmp);
                converter.seti("dir", 1, 0); // Horizontal
                converter.setf("ch", 1, 0); // [Y]00

                mMediumBlur = TexturePool.get(w, h, 1, Texture.Format.Float16);
                converter.drawBlocks(mMediumBlur);
            }

            {
                // First render to the tmp buffer.
                converter.setTexture("buf", intermediate);
                //converter.setf("sigma", 0.33f);
                //converter.seti("radius", 2, 1);
                converter.setf("sigma", 0.67f);
                converter.seti("radius", 3, 1);
                converter.seti("dir", 0, 1); // Vertical
                converter.setf("ch", 0, 1); // xy[Y]
                converter.drawBlocks(tmp, false);

                // Now render from tmp to the real buffer.
                converter.setTexture("buf", tmp);
                converter.seti("dir", 1, 0); // Horizontal
                converter.setf("ch", 1, 0); // [Y]00

                mWeakBlur = TexturePool.get(w, h, 1, Texture.Format.Float16);
                converter.drawBlocks(mWeakBlur);
            }
        }
    }

    @Override
    public void close() {
        // Textures only exist when LCE was enabled (same guard as execute()).
        if (getProcessParams().lce) {
            mWeakBlur.close();
            mMediumBlur.close();
            mStrongBlur.close();
        }
    }

    @Override
    public int getShader() {
        return R.raw.stage3_2_blur_fs;
    }
}
121 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/pipeline/post/ToneMap.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.pipeline.post;
2 |
3 | import android.util.Log;
4 |
5 | import java.nio.ByteBuffer;
6 | import java.nio.FloatBuffer;
7 | import java.util.Random;
8 |
9 | import amirz.dngprocessor.R;
10 | import amirz.dngprocessor.gl.GLPrograms;
11 | import amirz.dngprocessor.gl.Texture;
12 | import amirz.dngprocessor.params.ProcessParams;
13 | import amirz.dngprocessor.params.SensorParams;
14 | import amirz.dngprocessor.pipeline.Stage;
15 | import amirz.dngprocessor.pipeline.StagePipeline;
16 | import amirz.dngprocessor.pipeline.intermediate.MergeDetail;
17 |
18 | import static amirz.dngprocessor.colorspace.ColorspaceConstants.CUSTOM_ACR3_TONEMAP_CURVE_COEFFS;
19 | import static android.opengl.GLES20.*;
20 |
21 | public class ToneMap extends Stage {
22 | private static final String TAG = "ToneMap";
23 |
24 | private final int[] mFbo = new int[1];
25 | private final float[] mXYZtoProPhoto, mProPhotoToSRGB;
26 |
27 | private final int ditherSize = 128;
28 | private final byte[] dither = new byte[ditherSize * ditherSize * 2];
29 |
30 | private Texture mDitherTex;
31 |
32 | public ToneMap(float[] XYZtoProPhoto, float[] proPhotoToSRGB) {
33 | mXYZtoProPhoto = XYZtoProPhoto;
34 | mProPhotoToSRGB = proPhotoToSRGB;
35 | }
36 |
37 | @Override
38 | public void init(GLPrograms converter, SensorParams sensor, ProcessParams process) {
39 | super.init(converter, sensor, process);
40 |
41 | // Save output FBO.
42 | glGetIntegerv(GL_FRAMEBUFFER_BINDING, mFbo, 0);
43 | }
44 |
45 | @Override
46 | protected void execute(StagePipeline.StageMap previousStages) {
47 | GLPrograms converter = getConverter();
48 | SensorParams sensor = getSensorParams();
49 | ProcessParams process = getProcessParams();
50 |
51 | // Load intermediate buffers as textures
52 | Texture highRes = previousStages.getStage(MergeDetail.class).getIntermediate();
53 | converter.setTexture("highRes", highRes);
54 | converter.seti("intermediateWidth", highRes.getWidth());
55 | converter.seti("intermediateHeight", highRes.getHeight());
56 |
57 | if (process.lce) {
58 | BlurLCE blur = previousStages.getStage(BlurLCE.class);
59 | converter.setTexture("weakBlur", blur.getWeakBlur());
60 | converter.setTexture("mediumBlur", blur.getMediumBlur());
61 | converter.setTexture("strongBlur", blur.getStrongBlur());
62 | }
63 |
64 | float satLimit = getProcessParams().satLimit;
65 | Log.d(TAG, "Saturation limit " + satLimit);
66 | converter.setf("satLimit", satLimit);
67 |
68 | converter.setf("toneMapCoeffs", CUSTOM_ACR3_TONEMAP_CURVE_COEFFS);
69 | converter.setf("XYZtoProPhoto", mXYZtoProPhoto);
70 | converter.setf("proPhotoToSRGB", mProPhotoToSRGB);
71 | converter.seti("outOffset", sensor.outputOffsetX, sensor.outputOffsetY);
72 |
73 | converter.seti("lce", process.lce ? 1 : 0);
74 | //NoiseReduce.NRParams nrParams = previousStages.getStage(NoiseReduce.class).getNRParams();
75 | //converter.setf("sharpenFactor", nrParams.sharpenFactor);
76 | //converter.setf("adaptiveSaturation", nrParams.adaptiveSaturation, nrParams.adaptiveSaturationPow);
77 | converter.setf("sharpenFactor", process.sharpenFactor);
78 | converter.setf("adaptiveSaturation", process.adaptiveSaturation);
79 |
80 | float[] saturation = process.saturationMap;
81 | float[] sat = new float[saturation.length + 1];
82 | System.arraycopy(saturation, 0, sat, 0, saturation.length);
83 | sat[saturation.length] = saturation[0];
84 |
85 | Texture satTex = new Texture(sat.length, 1, 1, Texture.Format.Float16,
86 | FloatBuffer.wrap(sat), GL_LINEAR, GL_CLAMP_TO_EDGE);
87 | converter.setTexture("saturation", satTex);
88 |
89 | // Fill with noise
90 | new Random(8682522807148012L).nextBytes(dither);
91 | mDitherTex = new Texture(ditherSize, ditherSize, 1, Texture.Format.UInt16,
92 | ByteBuffer.wrap(dither));
93 | converter.setTexture("ditherTex", mDitherTex);
94 | converter.seti("ditherSize", ditherSize);
95 |
96 | // Restore output FBO.
97 | glBindFramebuffer(GL_FRAMEBUFFER, mFbo[0]);
98 | }
99 |
100 | @Override
101 | public int getShader() {
102 | return R.raw.stage3_3_tonemap_fs;
103 | }
104 |
105 | @Override
106 | public void close() {
107 | mDitherTex.close();
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/scheduler/BootReceiver.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.scheduler;
2 |
3 | import android.content.BroadcastReceiver;
4 | import android.content.Context;
5 | import android.content.Intent;
6 |
7 | public class BootReceiver extends BroadcastReceiver {
8 | @Override
9 | public void onReceive(Context context, Intent intent) {
10 | if (Intent.ACTION_BOOT_COMPLETED.equals(intent.getAction())) {
11 | DngScanJob.scheduleJob(context);
12 | }
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/scheduler/DngParseService.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.scheduler;
2 |
3 | import android.app.IntentService;
4 | import android.content.Context;
5 | import android.content.Intent;
6 | import android.net.Uri;
7 | import android.os.Handler;
8 | import android.support.annotation.Nullable;
9 | import android.util.Log;
10 | import android.widget.Toast;
11 |
12 | import java.io.File;
13 |
14 | import amirz.dngprocessor.parser.TIFFTag;
15 | import amirz.dngprocessor.util.NotifHandler;
16 | import amirz.dngprocessor.util.Path;
17 | import amirz.dngprocessor.Preferences;
18 | import amirz.dngprocessor.R;
19 | import amirz.dngprocessor.util.Utilities;
20 | import amirz.dngprocessor.parser.DngParser;
21 |
22 | import static amirz.dngprocessor.util.Utilities.ATLEAST_OREO;
23 |
/**
 * Service that parses and processes one DNG file (the intent's data uri),
 * reporting progress through NotifHandler and toasts on failure.
 */
public class DngParseService extends IntentService {
    private static final String TAG = "DngParseService";

    /**
     * Starts this service for the given DNG uri; uses startForegroundService
     * on Oreo+ where plain background service starts are restricted.
     */
    public static void runForUri(Context context, Uri uri) {
        context = context.getApplicationContext();

        Intent intent = new Intent(context, DngParseService.class);
        intent.setData(uri);

        if (ATLEAST_OREO) {
            context.startForegroundService(intent);
        } else {
            context.startService(intent);
        }
    }

    public DngParseService() {
        super(TAG);
    }

    @Override
    protected void onHandleIntent(@Nullable Intent intent) {
        Uri uri = intent.getData();
        String file = Path.getFileFromUri(this, uri);
        // NOTE(review): Log.e for a non-error; Log.d would match the rest.
        Log.e(TAG, "onHandleIntent " + file);

        NotifHandler.create(this, file);
        try {
            Preferences pref = Preferences.global();
            pref.applyAll(Utilities.prefs(this), getResources());
            // Remember the uri so the image can be reprocessed later.
            Utilities.prefs(this)
                    .edit()
                    .putString(getString(R.string.pref_reprocess), uri.toString())
                    .apply();

            long startTime = System.currentTimeMillis();
            new DngParser(this, uri, file).run();
            long endTime = System.currentTimeMillis();
            Log.d(TAG, "Took " + (endTime - startTime) * 0.001f + "s to process");

            // Optionally delete the original raw file, and notify the media
            // scanner so the gallery drops the stale entry.
            if (pref.deleteOriginal.get()) {
                String path = Path.getPathFromUri(this, uri);
                Log.e(TAG, "Deleting " + path);
                File resolvedFile = new File(path);
                if (resolvedFile.delete()) {
                    sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE,
                            Uri.fromFile(resolvedFile)));
                } else {
                    postMsg("Could not delete " + file);
                }
            }
        } catch (TIFFTag.TIFFTagException e) {
            e.printStackTrace();
            postMsg("Missing metadata in " + file + ": " + e.getMessage());
        } catch (Exception e) {
            // Deliberate catch-all: one broken image must not kill the service.
            e.printStackTrace();
            postMsg("Could not load " + file);
        }
        NotifHandler.done(this);
    }

    /** Shows a toast on the main thread (this method runs on a worker thread). */
    private void postMsg(String msg) {
        new Handler(getMainLooper()).post(() ->
                Toast.makeText(this, msg, Toast.LENGTH_SHORT).show());
    }
}
90 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/scheduler/DngScanJob.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.scheduler;
2 |
3 | import android.app.job.JobInfo;
4 | import android.app.job.JobParameters;
5 | import android.app.job.JobScheduler;
6 | import android.app.job.JobService;
7 | import android.content.ComponentName;
8 | import android.content.ContentResolver;
9 | import android.content.Context;
10 | import android.content.SharedPreferences;
11 | import android.net.Uri;
12 | import android.provider.MediaStore;
13 | import android.util.Log;
14 |
15 | import amirz.dngprocessor.util.Path;
16 | import amirz.dngprocessor.Preferences;
17 | import amirz.dngprocessor.util.Utilities;
18 |
/**
 * Content-triggered job that watches the MediaStore images uri and dispatches
 * newly appeared, unprocessed RAW images to DngParseService.
 */
public class DngScanJob extends JobService {
    private static final String TAG = "DngScanJob";
    private static final int SCAN_DNG_JOB = 9500;

    // Watch all external images, including descendant uris.
    public static final JobInfo.TriggerContentUri TRIGGER_CONTENT_URI =
            new JobInfo.TriggerContentUri(MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
                    JobInfo.TriggerContentUri.FLAG_NOTIFY_FOR_DESCENDANTS);

    // Built once and reused across re-schedules.
    private static JobInfo sJobInfo;

    /** Schedules (or re-arms) the content-triggered scan job. */
    public static void scheduleJob(Context context) {
        JobScheduler js = context.getSystemService(JobScheduler.class);
        if (js != null) {
            if (sJobInfo == null) {
                sJobInfo = new JobInfo.Builder(SCAN_DNG_JOB,
                        new ComponentName(context.getApplicationContext(), DngScanJob.class))
                        .addTriggerContentUri(DngScanJob.TRIGGER_CONTENT_URI)
                        .setTriggerContentMaxDelay(1)
                        .build();
            }

            Log.w(TAG, "Scheduling job");
            js.schedule(sJobInfo);
        }
    }

    public DngScanJob() {
    }

    @Override
    public boolean onStartJob(JobParameters params) {
        Preferences pref = Preferences.global();
        boolean backgroundProcess = pref.apply(Utilities.prefs(this), getResources(), pref.backgroundProcess);

        StringBuilder sb = new StringBuilder();
        sb.append("onStartJob: Media content has changed: ");

        ContentResolver contentResolver = getContentResolver();
        SharedPreferences prefs = Utilities.prefs(this);

        if (params.getTriggeredContentAuthorities() != null) {
            if (params.getTriggeredContentUris() != null) {
                for (Uri uri : params.getTriggeredContentUris()) {
                    try {
                        String file = Path.getFileFromUri(this, uri);
                        // Strip query parameters so one image maps to one key.
                        String key = uri.buildUpon().clearQuery().build().toString();

                        // If this is an unprocessed RAW image, process it and save that we did.
                        if (Path.isRaw(contentResolver, uri, file) && prefs.getBoolean(key, true)) {
                            prefs.edit().putBoolean(key, false).apply();
                            if (backgroundProcess) {
                                DngParseService.runForUri(this, uri);
                            }
                            sb.append("PROCESS@");
                        }

                        sb.append(file);
                        sb.append(", ");
                    } catch (Exception ignored) {
                        // Best effort: skip uris that cannot be resolved.
                    }
                }
            }
        }

        Log.w(TAG, sb.toString());
        // Content-triggered jobs fire only once; re-schedule to keep listening.
        scheduleJob(this);
        return true;
    }

    @Override
    public boolean onStopJob(JobParameters params) {
        Log.w(TAG, "onStopJob");
        return false;
    }
}
94 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/util/Constants.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.util;
2 |
public class Constants {
    // Height in pixels of one processing strip.
    // NOTE(review): presumably the strip height used by the block-wise GL
    // pipeline (GLBlockProcessing) — confirm at the usage site.
    public static final int BLOCK_HEIGHT = 48;

    // Hot/bad pixel interpolation direction flags. These match the pxInfo
    // bits tested in stage1_1_fs.glsl: 1 = horizontal neighbours,
    // 2 = vertical neighbours, 4 = diagonal cross.
    public static final short HORZ = 1;
    public static final short VERT = 2;
    public static final short PLUS = HORZ | VERT;
    public static final short CROSS = 4;

    // 3x3 color matrix, stored row-major. Every entry is an exact multiple of
    // 1/256 (8-bit fixed point, e.g. 1.109375 = 284/256). "ArcSoft" suggests
    // vendor-tuned color correction data — confirm provenance before editing.
    public static final float[] ARCSOFT_CC1 = new float[] {
            1.109375f, -0.5234375f, -0.171875f,
            -0.96875f, 1.875f, 0.0390625f,
            0.046875f, -0.171875f, 0.8984375f
    };

    // Second vendor color matrix, same 1/256 quantization as ARCSOFT_CC1.
    public static final float[] ARCSOFT_CC2 = new float[] {
            1.4375f, -0.6796875f, -0.21875f,
            -0.96875f, 1.875f, 0.0390625f,
            0.0390625f, -0.140625f, 0.734375f
    };

    // 3x3 identity matrix (no color transformation).
    public static final float[] DIAGONAL = new float[] {
            1f, 0f, 0f,
            0f, 1f, 0f,
            0f, 0f, 1f
    };
}
29 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/util/NotifHandler.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.util;
2 |
3 | import android.app.Notification;
4 | import android.app.NotificationChannel;
5 | import android.app.NotificationManager;
6 | import android.app.PendingIntent;
7 | import android.app.Service;
8 | import android.content.Context;
9 | import android.content.Intent;
10 |
11 | import amirz.dngprocessor.R;
12 |
13 | import static amirz.dngprocessor.util.Utilities.ATLEAST_OREO;
14 |
15 | public class NotifHandler {
16 | private static final String TAG = "NotifHandler";
17 | private static final String CHANNEL = "default";
18 | private static final int FOREGROUND_ID = 1;
19 | private static Notification.Builder mBuilder;
20 |
21 | public static void createChannel(Context context) {
22 | if (ATLEAST_OREO) {
23 | NotificationChannel channel = new NotificationChannel(CHANNEL, "Default",
24 | NotificationManager.IMPORTANCE_LOW);
25 | channel.enableLights(false);
26 | channel.enableVibration(false);
27 | manager(context).createNotificationChannel(channel);
28 | }
29 | }
30 |
31 | public static void create(Service service, String name) {
32 | PendingIntent pendingIntent = PendingIntent.getActivity(service, 0, new Intent(), 0);
33 | if (ATLEAST_OREO) {
34 | mBuilder = new Notification.Builder(service, CHANNEL);
35 | } else {
36 | mBuilder = new Notification.Builder(service);
37 | }
38 |
39 | mBuilder.setSmallIcon(R.drawable.ic_notif)
40 | .setContentTitle("Processing " + name)
41 | .setContentIntent(pendingIntent);
42 |
43 | service.startForeground(FOREGROUND_ID, mBuilder.build());
44 | }
45 |
46 | public static void progress(Context context, int max, int progress) {
47 | Notification notif = mBuilder.setProgress(max, progress, false).build();
48 | manager(context).notify(FOREGROUND_ID, notif);
49 | }
50 |
51 | public static void done(Service service) {
52 | service.stopForeground(true);
53 | }
54 |
55 | private static NotificationManager manager(Context context) {
56 | return context.getSystemService(NotificationManager.class);
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/util/Path.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.util;
2 |
3 | import android.content.ContentResolver;
4 | import android.content.ContentUris;
5 | import android.content.Context;
6 | import android.database.Cursor;
7 | import android.net.Uri;
8 | import android.os.Environment;
9 | import android.provider.DocumentsContract;
10 | import android.provider.MediaStore;
11 | import android.provider.OpenableColumns;
12 | import android.util.Log;
13 |
14 | import java.io.File;
15 |
16 | import amirz.dngprocessor.Preferences;
17 |
18 | public class Path {
19 | private static final String TAG = "Path";
20 |
21 | public static final String EXT_RAW = ".dng";
22 | public static final String EXT_JPG = ".jpg";
23 | public static final String EXT_JPG_SUFFIX = "_DNGP" + EXT_JPG;
24 |
25 | public static final String MIME_RAW = "image/x-adobe-dng";
26 | public static final String MIME_JPG = "image/jpeg";
27 |
28 | public static final String ROOT = Environment.getExternalStorageDirectory().toString();
29 |
30 | public static boolean isRaw(ContentResolver contentResolver, Uri uri, String file) {
31 | String mime = contentResolver.getType(uri);
32 | return MIME_RAW.equals(mime) || (MIME_JPG.equals(mime) && file.endsWith(Path.EXT_RAW));
33 | }
34 |
35 | public static String processedPath(String dir, String name) {
36 | dir = ROOT + File.separator + dir;
37 | File folder = new File(dir);
38 | if (!folder.exists() && !folder.mkdir()) {
39 | throw new RuntimeException("Cannot create " + dir);
40 | }
41 | name = name.replace(EXT_RAW, Preferences.global().suffix.get() ? EXT_JPG_SUFFIX : EXT_JPG);
42 | return dir + File.separator + name;
43 | }
44 |
45 | public static String getFileFromUri(Context context, Uri uri) {
46 | String fileName = getColumn(context, uri, OpenableColumns.DISPLAY_NAME);
47 |
48 | /* document/raw:PATH */
49 | if (fileName == null) {
50 | String result = getPathFromUri(context, uri);
51 | int cut = result.lastIndexOf('/');
52 | if (cut != -1) {
53 | fileName = result.substring(cut + 1);
54 | }
55 | }
56 |
57 | Log.d(TAG, "Resolved " + uri.toString() + " to name " + fileName);
58 | return fileName;
59 | }
60 |
61 | public static String getPathFromUri(Context context, Uri uri) {
62 | String filePath = getColumn(context, uri, MediaStore.Images.Media.DATA);
63 |
64 | /* document/raw:PATH */
65 | if (filePath == null) {
66 | filePath = uri.getPath();
67 | if (filePath.contains(":")) {
68 | String[] split = filePath.split(":");
69 | filePath = split[split.length - 1];
70 | }
71 | }
72 |
73 | Log.d(TAG, "Resolved " + uri.toString() + " to path " + filePath);
74 | return filePath;
75 | }
76 |
77 | private static String getColumn(Context context, Uri uri, String column) {
78 | ContentResolver cr = context.getContentResolver();
79 | String result = null;
80 | if (DocumentsContract.isDocumentUri(context, uri)) {
81 | String id = DocumentsContract.getDocumentId(uri);
82 | if (id.contains(":")) {
83 | id = id.split(":")[1];
84 | }
85 |
86 | String p = uri.getPath();
87 | if (p != null) {
88 | if (p.startsWith("/document/image")) {
89 | /* document/image:NUM */
90 | result = query(cr,
91 | MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
92 | column,
93 | MediaStore.Images.Media._ID + "=?",
94 | new String[] { id });
95 | } else if (p.startsWith("/document")) {
96 | /* document/NUM */
97 | try {
98 | long l = Long.parseLong(id);
99 | result = query(cr, ContentUris.withAppendedId(
100 | Uri.parse("content://downloads/public_downloads"), l),
101 | column);
102 | } catch (Exception ignored) {
103 | }
104 | }
105 | }
106 | }
107 |
108 | /* media/external/images/media/NUM */
109 | if (result == null) {
110 | result = query(cr, uri, column);
111 | }
112 |
113 | return result;
114 | }
115 |
116 | private static String query(ContentResolver cr, Uri uri, String column) {
117 | return query(cr, uri, column, null, null);
118 | }
119 |
120 | private static String query(ContentResolver cr, Uri uri, String column, String selection, String[] selectionArgs) {
121 | try (Cursor cursor = cr.query(
122 | uri, new String[] { column }, selection, selectionArgs, null)) {
123 | if (cursor != null) {
124 | int columnIndex = cursor.getColumnIndex(column);
125 | if (cursor.moveToFirst()) {
126 | return cursor.getString(columnIndex);
127 | }
128 | }
129 | }
130 | return null;
131 | }
132 | }
133 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/util/ShaderLoader.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.util;
2 |
3 | import android.content.Context;
4 | import android.content.res.Resources;
5 |
6 | import java.io.BufferedReader;
7 | import java.io.IOException;
8 | import java.io.InputStream;
9 | import java.io.InputStreamReader;
10 | import java.util.HashMap;
11 | import java.util.Map;
12 |
13 | import amirz.dngprocessor.R;
14 |
15 | public class ShaderLoader {
16 | private static ShaderLoader sInstance;
17 |
18 | public static ShaderLoader getInstance(Context context) {
19 | if (sInstance == null) {
20 | sInstance = new ShaderLoader(context.getApplicationContext().getResources());
21 |
22 | sInstance.mapImport("gamma", R.raw.import_gamma);
23 | sInstance.mapImport("gaussian", R.raw.import_gaussian);
24 | sInstance.mapImport("load3x3", R.raw.import_load3x3);
25 | sInstance.mapImport("load3x3v2", R.raw.import_load3x3v2);
26 | sInstance.mapImport("load3x3v3", R.raw.import_load3x3v3);
27 | sInstance.mapImport("load5x5v3", R.raw.import_load5x5v3);
28 | sInstance.mapImport("sigmoid", R.raw.import_sigmoid);
29 | sInstance.mapImport("xyytoxyz", R.raw.import_xyy_to_xyz);
30 | sInstance.mapImport("xyztoxyy", R.raw.import_xyz_to_xyy);
31 | }
32 | return sInstance;
33 | }
34 |
35 | private final Resources mRes;
36 | private final Map mImports = new HashMap<>();
37 | private final Map mRaws = new HashMap<>();
38 |
39 | private ShaderLoader(Resources res) {
40 | mRes = res;
41 | }
42 |
43 | private void mapImport(String name, int resId) {
44 | mImports.put("#include " + name, readRawInternal(resId, false)
45 | .replace('\n', ' ')
46 | .replace('\r', ' '));
47 | }
48 |
49 | public String readRaw(int resId) {
50 | return mRaws.computeIfAbsent(resId, this::readRawInternal);
51 | }
52 |
53 | private String readRawInternal(int resId) {
54 | return readRawInternal(resId, true);
55 | }
56 |
57 | private String readRawInternal(int resId, boolean process) {
58 | try (InputStream inputStream = mRes.openRawResource(resId)) {
59 | InputStreamReader inputStreamReader = new InputStreamReader(inputStream);
60 | BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
61 |
62 | StringBuilder text = new StringBuilder();
63 | String line;
64 | while ((line = bufferedReader.readLine()) != null) {
65 | text.append(process ? mImports.getOrDefault(line, line) : line);
66 | text.append('\n');
67 | }
68 |
69 | return text.toString();
70 | } catch (IOException e) {
71 | throw new RuntimeException(e);
72 | }
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/dngprocessor/util/Utilities.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor.util;
2 |
3 | import android.content.Context;
4 | import android.content.SharedPreferences;
5 | import android.os.Build;
6 |
7 | public class Utilities {
8 | public static final boolean ATLEAST_OREO = Build.VERSION.SDK_INT >= Build.VERSION_CODES.O;
9 |
10 | /**
11 | * Globally used preferences.
12 | * @param context Context instance used to retrieve the {@link SharedPreferences} instance.
13 | * @return Single {@link SharedPreferences} instance that is used by the application.
14 | */
15 | public static SharedPreferences prefs(Context context) {
16 | return context.getSharedPreferences(context.getPackageName(), Context.MODE_PRIVATE);
17 | }
18 |
19 | public static String logGainMap(float[] gainMap) {
20 | StringBuilder log = new StringBuilder();
21 | for (float f : gainMap) {
22 | log.append(f);
23 | log.append("f, ");
24 | }
25 | return log.toString();
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/library/settings/GlobalPreferences.java:
--------------------------------------------------------------------------------
1 | package amirz.library.settings;
2 |
3 | import android.content.SharedPreferences;
4 | import android.content.res.Resources;
5 | import android.util.Log;
6 | import android.util.SparseArray;
7 | import android.util.TypedValue;
8 |
9 | import java.util.ArrayList;
10 | import java.util.List;
11 |
12 | /**
13 | * Class that provides synchronized preferences using the singleton design pattern.
14 | * Extensions should add a static getInstance() method.
15 | */
16 | public abstract class GlobalPreferences {
17 | private static final String TAG = "GlobalPreferences";
18 | private final SparseArray[ mPreferences = new SparseArray<>();
19 |
20 | /**
21 | * Loads all preferences from the SharedPreferences instance.
22 | * @param prefs Instance from which the data is pulled.
23 | * @param res Resources used to deserialize the default values as fallback values.
24 | */
25 | public void applyAll(SharedPreferences prefs, Resources res) {
26 | for (int i = 0; i < mPreferences.size(); i++) {
27 | // noinspection unchecked
28 | apply(prefs, res, mPreferences.valueAt(i));
29 | }
30 | }
31 |
32 | /**
33 | * Loads one key's preference from the SharedPreferences instance.
34 | * @param prefs Instance from which the data is pulled.
35 | * @param res Resources used to deserialize the default value as a fallback value.
36 | * @param tunable Reference to preference.
37 | * @return New value of the preference.
38 | */
39 | public T apply(SharedPreferences prefs, Resources res, Ref tunable) {
40 | String key = res.getString(tunable.settingId);
41 | TypedValue defaultValue = new TypedValue();
42 | res.getValue(tunable.defaultId, defaultValue, true);
43 |
44 | Log.d(TAG, "Updating " + key);
45 | tunable.load(prefs, key, defaultValue);
46 | return tunable.get();
47 | }
48 |
49 | public final class ResetContext implements AutoCloseable {
50 | private final SharedPreferences mPrefs;
51 | private final Resources mRes;
52 | private final List mReset = new ArrayList<>();
53 |
54 | private ResetContext(SharedPreferences prefs, Resources res) {
55 | mPrefs = prefs;
56 | mRes = res;
57 | }
58 |
59 | public void reset(Ref tunable) {
60 | mReset.add(mRes.getString(tunable.settingId));
61 | }
62 |
63 | @Override
64 | public void close() {
65 | SharedPreferences.Editor edit = mPrefs.edit();
66 | for (String reset : mReset) {
67 | edit.remove(reset);
68 | }
69 | edit.apply();
70 | GlobalPreferences.this.applyAll(mPrefs, mRes);
71 | }
72 | }
73 |
74 | public interface ResetContextFunc {
75 | void onReset(ResetContext ctx);
76 | }
77 |
78 | public void reset(SharedPreferences prefs, Resources res, ResetContextFunc todo) {
79 | try (ResetContext ctx = new ResetContext(prefs, res)) {
80 | todo.onReset(ctx);
81 | }
82 | }
83 |
84 | /**
85 | * Referenced setting that holds a boolean.
86 | */
87 | public final class BooleanRef extends Ref {
88 | public BooleanRef(int settingId, int defaultId) {
89 | super(settingId, defaultId);
90 | }
91 |
92 | @Override
93 | void load(SharedPreferences prefs, String key, TypedValue defaultValue) {
94 | value = prefs.getBoolean(key, defaultValue.data == 1);
95 | }
96 | }
97 |
98 | /**
99 | * Referenced setting that holds a floating point number.
100 | */
101 | public final class FloatRef extends Ref {
102 | public FloatRef(int settingId, int defaultId) {
103 | super(settingId, defaultId);
104 | }
105 |
106 | @Override
107 | void load(SharedPreferences prefs, String key, TypedValue defaultValue) {
108 | String defaultString = defaultValue.coerceToString().toString();
109 | value = Float.valueOf(prefs.getString(key, defaultString));
110 | }
111 | }
112 |
113 | /**
114 | * Referenced setting that holds an integer.
115 | */
116 | public final class IntegerRef extends Ref {
117 | public IntegerRef(int settingId, int defaultId) {
118 | super(settingId, defaultId);
119 | }
120 |
121 | @Override
122 | void load(SharedPreferences prefs, String key, TypedValue defaultValue) {
123 | String defaultString = defaultValue.coerceToString().toString();
124 | value = Integer.valueOf(prefs.getString(key, defaultString));
125 | }
126 | }
127 |
128 | /**
129 | * Referenced setting that holds a string.
130 | */
131 | public final class StringRef extends Ref {
132 | public StringRef(int settingId, int entries) {
133 | super(settingId, entries);
134 | }
135 |
136 | @Override
137 | void load(SharedPreferences prefs, String key, TypedValue defaultValue) {
138 | String defaultString = defaultValue.coerceToString().toString();
139 | value = prefs.getString(key, defaultString);
140 | }
141 | }
142 |
143 | private abstract class Ref {
144 | T value;
145 | final int settingId;
146 | final int defaultId;
147 |
148 | Ref(int settingId, int defaultId) {
149 | this.settingId = settingId;
150 | this.defaultId = defaultId;
151 | mPreferences.append(settingId, this);
152 | }
153 |
154 | public T get() {
155 | return value;
156 | }
157 |
158 | abstract void load(SharedPreferences prefs, String key, TypedValue defaultValue);
159 | }
160 |
161 | /**
162 | * Empty constructor that prevents direct instantiation of this class.
163 | */
164 | protected GlobalPreferences() {
165 | }
166 | }
167 |
--------------------------------------------------------------------------------
/app/src/main/java/amirz/library/settings/TextPreference.java:
--------------------------------------------------------------------------------
1 | package amirz.library.settings;
2 |
3 | import android.content.Context;
4 | import android.preference.EditTextPreference;
5 | import android.text.TextUtils;
6 | import android.util.AttributeSet;
7 |
8 | public class TextPreference extends EditTextPreference {
9 | public TextPreference(Context context, AttributeSet attrs) {
10 | super(context, attrs);
11 | }
12 |
13 | @Override
14 | public CharSequence getSummary() {
15 | CharSequence summary = super.getSummary();
16 | if (TextUtils.isEmpty(summary)) {
17 | return getText();
18 | }
19 | return getText() + " " + summary;
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_notif.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/amirzaidi/DNGProcessor/29a188be16b1e7ba48fd953a73f0f3a03c681a77/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/amirzaidi/DNGProcessor/29a188be16b1e7ba48fd953a73f0f3a03c681a77/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/amirzaidi/DNGProcessor/29a188be16b1e7ba48fd953a73f0f3a03c681a77/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/amirzaidi/DNGProcessor/29a188be16b1e7ba48fd953a73f0f3a03c681a77/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/amirzaidi/DNGProcessor/29a188be16b1e7ba48fd953a73f0f3a03c681a77/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/amirzaidi/DNGProcessor/29a188be16b1e7ba48fd953a73f0f3a03c681a77/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/amirzaidi/DNGProcessor/29a188be16b1e7ba48fd953a73f0f3a03c681a77/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/amirzaidi/DNGProcessor/29a188be16b1e7ba48fd953a73f0f3a03c681a77/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/amirzaidi/DNGProcessor/29a188be16b1e7ba48fd953a73f0f3a03c681a77/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/amirzaidi/DNGProcessor/29a188be16b1e7ba48fd953a73f0f3a03c681a77/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/raw/import_gamma.glsl:
--------------------------------------------------------------------------------
/* sRGB opto-electronic transfer function (linear -> gamma-encoded).
   Linear segment below 0.0031308, power segment (exponent 1/2.4 = 0.4166667) above. */
float gammaEncode(float x) {
    return x <= 0.0031308f
        ? x * 12.92f
        : 1.055f * pow(x, 0.4166667f) - 0.055f;
}

/* Inverse: gamma-encoded -> linear.
   0.0773994 = 1/12.92, 0.9478673 = 1/1.055. */
float gammaDecode(float x) {
    return x <= 0.0404500f
        ? x * 0.0773994f
        : pow(0.9478673f * (x + 0.055f), 2.4f);
}
14 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/import_gaussian.glsl:
--------------------------------------------------------------------------------
/* Gaussian of distance d with standard deviation s, WITHOUT the
   1 / (s * sqrt(2*pi)) normalization factor: peak value is 1 at d == 0. */
float unscaledGaussian(float d, float s) {
    float interm = d / s;
    return exp(-0.5f * interm * interm);
}

/* Componentwise vec3 distance, shared scalar sigma. */
vec3 unscaledGaussian(vec3 d, float s) {
    vec3 interm = d / s;
    return exp(-0.5f * interm * interm);
}

/* Componentwise vec3 distance with a per-component sigma. */
vec3 unscaledGaussian(vec3 d, vec3 s) {
    vec3 interm = d / s;
    return exp(-0.5f * interm * interm);
}
15 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/import_load3x3.glsl:
--------------------------------------------------------------------------------
/* Loads the .x channel of the 3x3 neighborhood centered on texel xy,
   row-major from offset (-1,-1) to (1,1).
   NOTE(review): no border handling — callers must keep xy at least one
   texel inside the image, since out-of-range texelFetch is undefined. */
float[9] load3x3(ivec2 xy, sampler2D buf) {
    float outputArray[9];
    for (int i = 0; i < 9; i++) {
        outputArray[i] = texelFetch(buf, xy + ivec2((i % 3) - 1, (i / 3) - 1), 0).x;
    }
    return outputArray;
}
8 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/import_load3x3v2.glsl:
--------------------------------------------------------------------------------
/* Loads the .xy channels of a dilated 3x3 neighborhood centered on texel xy:
   samples are spaced n texels apart, row-major from (-n,-n) to (n,n).
   NOTE(review): no border handling — keep samples inside the image. */
vec2[9] load3x3(ivec2 xy, int n, sampler2D buf) {
    vec2 outputArray[9];
    for (int i = 0; i < 9; i++) {
        outputArray[i] = texelFetch(buf, xy + n * ivec2((i % 3) - 1, (i / 3) - 1), 0).xy;
    }
    return outputArray;
}
8 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/import_load3x3v3.glsl:
--------------------------------------------------------------------------------
/* Loads the .xyz channels of a dilated 3x3 neighborhood centered on texel xy:
   samples are spaced n texels apart, row-major from (-n,-n) to (n,n).
   NOTE(review): no border handling — keep samples inside the image. */
vec3[9] load3x3(ivec2 xy, int n, sampler2D buf) {
    vec3 outputArray[9];
    for (int i = 0; i < 9; i++) {
        outputArray[i] = texelFetch(buf, xy + n * ivec2((i % 3) - 1, (i / 3) - 1), 0).xyz;
    }
    return outputArray;
}
8 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/import_load5x5v3.glsl:
--------------------------------------------------------------------------------
/* Loads the .xyz channels of a dilated 5x5 neighborhood centered on texel xy:
   samples spaced n texels apart, row-major from (-2n,-2n) to (2n,2n).
   NOTE(review): no border handling — keep samples inside the image. */
vec3[25] load5x5(ivec2 xy, int n, sampler2D buf) {
    vec3 outputArray[25];
    for (int i = 0; i < 25; i++) {
        outputArray[i] = texelFetch(buf, xy + n * ivec2((i % 5) - 2, (i / 5) - 2), 0).xyz;
    }
    return outputArray;
}
8 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/import_sigmoid.glsl:
--------------------------------------------------------------------------------
/* Tone curve: identity below `transfer`, logistic (sigmoid) compression above it. */
float sigmoid(float val, float transfer) {
    if (val > transfer) {
        /* This variable maps the cut off point in the linear curve to the sigmoid */
        /* With this choice of a, 2/(1+exp(-a*transfer)) - 1 == transfer exactly,
           so the curve is continuous at the junction. */
        float a = log((1.f + transfer) / (1.f - transfer)) / transfer;

        /* Transform val using the sigmoid curve */
        val = 2.f / (1.f + exp(-a * val)) - 1.f;
    }
    return val;
}
11 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/import_xyy_to_xyz.glsl:
--------------------------------------------------------------------------------
/* Converts CIE xyY (chromaticity x, y plus luminance Y) to CIE XYZ.
   When y == 0 the chromaticity is undefined; (0, Y, 0) is returned
   instead of dividing by zero. */
vec3 xyYtoXYZ(vec3 xyY) {
    vec3 result = vec3(0.f, xyY.z, 0.f);
    if (xyY.y > 0.f) {
        result.x = xyY.x * xyY.z / xyY.y;
        result.z = (1.f - xyY.x - xyY.y) * xyY.z / xyY.y;
    }
    return result;
}
9 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/import_xyz_to_xyy.glsl:
--------------------------------------------------------------------------------
/* Converts CIE XYZ to xyY. Near-black inputs (sum <= 0.0001) keep the
   fallback chromaticity (0.3457, 0.3585) — approximately the D50 white
   point — instead of dividing by (almost) zero. */
vec3 XYZtoxyY(vec3 XYZ) {
    vec3 result = vec3(0.345703f, 0.358539f, XYZ.y);
    float sum = XYZ.x + XYZ.y + XYZ.z;
    if (sum > 0.0001f) {
        result.xy = XYZ.xy / sum;
    }
    return result;
}
9 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/passthrough_vs.glsl:
--------------------------------------------------------------------------------
#version 300 es

precision mediump float;

// Pass-through vertex shader: quad vertex positions are forwarded
// unchanged; the fragment shaders locate their texel via gl_FragCoord.
in vec4 vPosition;

void main() {
    // Forward position to fragment shader
    gl_Position = vPosition;
}
11 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage1_1_fs.glsl:
--------------------------------------------------------------------------------
#version 300 es

precision mediump float;
precision mediump usampler2D;

uniform usampler2D rawBuffer;
uniform int rawWidth;
uniform int rawHeight;

uniform sampler2D gainMap;
uniform usampler2D hotPixels;
uniform ivec2 hotPixelsSize;

// Sensor and picture variables
uniform int cfaPattern; // The Color Filter Arrangement pattern used
uniform vec4 blackLevel; // Blacklevel to subtract for each channel, given in CFA order
uniform float whiteLevel; // Whitelevel of sensor

// Out
out float intermediate;

void main() {
    ivec2 xy = ivec2(gl_FragCoord.xy);

    float v;
    // The hot pixel map tiles across the sensor (xy % hotPixelsSize); a
    // nonzero entry holds the interpolation direction flags:
    // 1 = horizontal, 2 = vertical, 4 = diagonal cross.
    int pxInfo = int(texelFetch(hotPixels, xy % hotPixelsSize, 0).x);
    if (pxInfo == 0) {
        v = float(texelFetch(rawBuffer, xy, 0).x);
    } else {
        // Replace the hot pixel with the average of the requested
        // same-channel neighbours. Both accumulators MUST start at zero:
        // reading an uninitialized local is undefined behavior in GLSL
        // (the originals were declared without initializers).
        uint vx = 0u;
        int c = 0;
        if ((pxInfo & 1) > 0) {
            // HORIZONTAL INTERPOLATE: same-CFA-channel pixels 2 columns away.
            for (int j = -2; j <= 2; j += 4) {
                vx += texelFetch(rawBuffer, xy + ivec2(j, 0), 0).x;
            }
            c += 2;
        }
        if ((pxInfo & 2) > 0) {
            // VERTICAL INTERPOLATE: same-CFA-channel pixels 2 rows away.
            for (int j = -2; j <= 2; j += 4) {
                vx += texelFetch(rawBuffer, xy + ivec2(0, j), 0).x;
            }
            c += 2;
        }
        if ((pxInfo & 4) > 0) {
            // CROSS INTERPOLATE: the four diagonal neighbours (+-1, +-1).
            // Fixed from (j % 4, j / 4), which sampled the asymmetric offsets
            // (-1,-1),(1,-1),(3,-1),(5,-1) instead of the diagonals.
            for (int j = 0; j < 4; j++) {
                vx += texelFetch(rawBuffer, xy + ivec2(2 * (j % 2) - 1, 2 * (j / 2) - 1), 0).x;
            }
            c += 4;
        }
        v = float(vx) / float(c);
    }

    // Bilinearly sample the gain map at this pixel's normalized position
    // (presumably lens shading correction — confirm against the DNG metadata).
    vec2 xyInterp = vec2(float(xy.x) / float(rawWidth), float(xy.y) / float(rawHeight));
    vec4 gains = texture(gainMap, xyInterp);

    int index = (xy.x & 1) | ((xy.y & 1) << 1); // bits [0,1] are blacklevel offset
    //index |= (cfaPattern << 2);
    float bl = 0.f;
    float g = 1.f;
    switch (index) {
        // RGGB
        case 0: bl = blackLevel.x; g = gains.x; break;
        case 1: bl = blackLevel.y; g = gains.y; break;
        case 2: bl = blackLevel.z; g = gains.z; break;
        case 3: bl = blackLevel.w; g = gains.w; break;
        /*
        // GRBG
        case 4: bl = blackLevel.x; g = gains.y; break;
        case 5: bl = blackLevel.y; g = gains.x; break;
        case 6: bl = blackLevel.z; g = gains.w; break;
        case 7: bl = blackLevel.w; g = gains.z; break;
        // GBRG
        case 8: bl = blackLevel.x; g = gains.y; break;
        case 9: bl = blackLevel.y; g = gains.w; break;
        case 10: bl = blackLevel.z; g = gains.x; break;
        case 11: bl = blackLevel.w; g = gains.z; break;
        // BGGR
        case 12: bl = blackLevel.x; g = gains.w; break;
        case 13: bl = blackLevel.y; g = gains.y; break;
        case 14: bl = blackLevel.z; g = gains.z; break;
        case 15: bl = blackLevel.w; g = gains.x; break;
        */
    }

    // Normalize to [0, 1] relative to the sensor's black/white levels and
    // apply the per-channel gain.
    intermediate = g * (v - bl) / (whiteLevel - bl);
}
89 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage1_2_fs.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D rawBuffer;
6 | uniform int rawWidth;
7 | uniform int rawHeight;
8 |
9 | // Sensor and picture variables
10 | uniform int cfaPattern; // The Color Filter Arrangement pattern used
11 |
12 | // Out
13 | out float intermediate;
14 |
15 | int ind(int x, int y) {
16 | int dim = 5;
17 | return x + dim / 2 + (y + dim / 2) * dim;
18 | }
19 |
20 | float fetch(ivec2 xy, int dx, int dy) {
21 | return texelFetch(rawBuffer, xy + ivec2(dx, dy), 0).x;
22 | }
23 |
// Demosaics the green channel at xy using Laroche-Prescott edge-sensing
// interpolation. Returns the (non-negative) green estimate for this pixel.
float demosaicG(ivec2 xy) {
    // Low 2 bits: parity within the 2x2 CFA tile; bits 2+: CFA layout.
    int index = (xy.x & 1) | ((xy.y & 1) << 1);
    index |= (cfaPattern << 2);
    float p = fetch(xy, 0, 0);
    switch (index) {
        // Pixels already on a green sample pass through unchanged.
        // RGR
        case 1: // R[G] G B
        case 4: // [G]R B G
        case 11: // G B R[G]
        case 14: // B G [G]R
        // BGB
        case 2: // R G [G]B
        case 7: // G R B[G]
        case 8: // [G]B R G
        case 13: // B[G] G R
            return p;
    }

    // Red or blue site: interpolate green from the four direct neighbours.
    float l = fetch(xy, -1, 0),
        r = fetch(xy, 1, 0),
        t = fetch(xy, 0, -1),
        b = fetch(xy, 0, 1);

    // Laroche and Prescott: classify the local gradient so interpolation
    // runs along an edge rather than across it.
    float p2 = 2.f * p;

    float dxp = p2 - fetch(xy, -2, 0) - fetch(xy, 2, 0);
    float dx = abs(l - r) + abs(dxp);

    float dyp = p2 - fetch(xy, 0, -2) - fetch(xy, 0, 2);
    float dy = abs(t - b) + abs(dyp);

    // Su: second-order-corrected green estimates along each axis.
    float gx = (l + r) * 0.5f + dxp * 0.25f;
    float gy = (t + b) * 0.5f + dyp * 0.25f;

    // Blend weights favouring the axis with the smaller gradient.
    float w1 = 0.87f;
    float w2 = 0.13f;

    if (dx < dy) {
        p = w1 * gx + w2 * gy;
    } else if (dx > dy) {
        p = w1 * gy + w2 * gx;
    } else {
        p = (gx + gy) * 0.5f;
    }

    // Interpolation can undershoot on hard edges; never return negative.
    return max(p, 0.f);
}
73 |
void main() {
    ivec2 xy = ivec2(gl_FragCoord.xy);
    // Keep a 2px margin so all 5x5 neighbourhood taps stay in bounds.
    ivec2 clamped = clamp(xy, ivec2(2), ivec2(rawWidth, rawHeight) - 3);
    intermediate = demosaicG(clamped);
}
80 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage1_3_fs.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D rawBuffer;
6 | uniform sampler2D greenBuffer;
7 | uniform int rawWidth;
8 | uniform int rawHeight;
9 |
10 | // Sensor and picture variables
11 | uniform sampler2D gainMap;
12 | uniform int cfaPattern; // The Color Filter Arrangement pattern used
13 | uniform vec4 neutralLevel; // Neutrallevel of sensor
14 | uniform vec3 neutralPoint; // The camera neutral
15 |
16 | // Transform
17 | uniform mat3 sensorToXYZ; // Color transform from sensor to XYZ.
18 |
19 | const int demosaicArray[16] = int[](
20 | 0, 1, 2, 3,
21 | 1, 0, 3, 2,
22 | 2, 3, 0, 1,
23 | 3, 2, 1, 0
24 | );
25 |
26 | // Out
27 | out vec3 intermediate;
28 |
29 | #include xyztoxyy
30 |
// Samples the four diagonal neighbours of xy from buf.
vec4 getCross(sampler2D buf, ivec2 xy) {
    float tl = texelFetch(buf, xy + ivec2(-1, -1), 0).x;
    float tr = texelFetch(buf, xy + ivec2(1, -1), 0).x;
    float bl = texelFetch(buf, xy + ivec2(-1, 1), 0).x;
    float br = texelFetch(buf, xy + ivec2(1, 1), 0).x;
    return vec4(tl, tr, bl, br);
}
39 |
// Samples the left and right neighbours of xy from buf.
vec2 getHorz(sampler2D buf, ivec2 xy) {
    float l = texelFetch(buf, xy - ivec2(1, 0), 0).x;
    float r = texelFetch(buf, xy + ivec2(1, 0), 0).x;
    return vec2(l, r);
}
46 |
// Samples the top and bottom neighbours of xy from buf.
vec2 getVert(sampler2D buf, ivec2 xy) {
    float t = texelFetch(buf, xy - ivec2(0, 1), 0).x;
    float b = texelFetch(buf, xy + ivec2(0, 1), 0).x;
    return vec2(t, b);
}
53 |
// Mean raw/green ratio over two taps; green is clamped to minG
// to avoid division by (near) zero.
float getScale(vec2 raw, vec2 green, float minG) {
    vec2 ratio = raw / max(green, minG);
    return dot(ratio, vec2(0.5f));
}
57 |
// Mean raw/green ratio over four taps; green is clamped to minG
// to avoid division by (near) zero.
float getScale(vec4 raw, vec4 green, float minG) {
    vec4 ratio = raw / max(green, minG);
    return dot(ratio, vec4(0.25f));
}
61 |
// Apply bilinear-interpolation to demosaic.
// Reconstructs full RGB at xy: green comes from the precomputed greenBuffer;
// red/blue are estimated by scaling neighbouring raw samples by the local
// raw/green ratio, which preserves chroma across luminance edges.
vec3 demosaic(ivec2 xy) {
    int x = xy.x;
    int y = xy.y;

    // Low 2 bits: parity in the 2x2 tile; bits 2-3: CFA layout.
    int index = (x & 1) | ((y & 1) << 1);
    index |= (cfaPattern << 2);
    vec3 pRGB;
    // 0 = red site, 1/2 = green sites, 3 = blue site.
    int pxType = demosaicArray[index];

    // We already computed green
    pRGB.g = texelFetch(greenBuffer, xy, 0).x;
    // Floor on green so the ratio in getScale stays finite.
    float minG = 0.01f;
    float g = max(pRGB.g, minG);

    if (pxType == 0 || pxType == 3) {
        // Red or blue site: the opposite colour lives on the diagonals.
        float p = texelFetch(rawBuffer, xy, 0).x;
        float cross = g * getScale(
            getCross(rawBuffer, xy),
            getCross(greenBuffer, xy),
            minG
        );
        if (pxType == 0) {
            // Red centered
            // B # B
            // # R #
            // B # B
            pRGB.r = p;
            pRGB.b = cross;
        } else {
            // Blue centered
            // R # R
            // # B #
            // R # R
            pRGB.r = cross;
            pRGB.b = p;
        }
    } else if (pxType == 1 || pxType == 2) {
        // Green site: red and blue come from horizontal/vertical neighbours.
        float horz = g * getScale(
            getHorz(rawBuffer, xy),
            getHorz(greenBuffer, xy),
            minG
        );
        float vert = g * getScale(
            getVert(rawBuffer, xy),
            getVert(greenBuffer, xy),
            minG
        );
        if (pxType == 1) {
            // Green centered w/ horizontally adjacent Red
            // # B #
            // R # R
            // # B #
            pRGB.r = horz;
            pRGB.b = vert;
        } else {
            // Green centered w/ horizontally adjacent Blue
            // # R #
            // B # B
            // # R #
            pRGB.r = vert;
            pRGB.b = horz;
        }
    }

    return pRGB;
}
129 |
130 |
// Converts a demosaiced sensor RGB sample to the intermediate xyY
// colourspace, applying the lens-shading gain map and a highlight
// reconstruction heuristic along the way.
vec3 convertSensorToIntermediate(ivec2 xy, vec3 sensor) {
    // Use gainmap to increase dynamic range.
    vec2 xyInterp = vec2(float(xy.x) / float(rawWidth), float(xy.y) / float(rawHeight));
    vec4 gains = texture(gainMap, xyInterp);
    // Scale the neutral (white) point by the smallest gain so no channel
    // exceeds it after gain correction.
    vec3 neutralScaled = min(min(gains.x, gains.y), min(gains.z, gains.w)) * neutralPoint;

    // Per-channel fraction of the white point; values > 1 indicate clipping.
    vec3 npf = sensor / neutralScaled;
    sensor = min(sensor, neutralScaled);

    // When both red and blue channels are above white point, assume green is too
    // So extend dynamic range by scaling white point
    // Use a bias so only high green values become higher
    // In highlights, bias should be one
    float bias = npf.g * npf.g * npf.g;
    sensor *= mix(1.f, max(npf.r + npf.b, 2.f) * 0.5f, bias);

    // Linear sensor -> XYZ -> xyY (chromaticity + luminance).
    vec3 XYZ = sensorToXYZ * sensor;
    vec3 intermediate = XYZtoxyY(XYZ);

    return intermediate;
}
152 |
void main() {
    ivec2 xy = ivec2(gl_FragCoord.xy);
    // Keep a 1px margin so the 3x3 taps used by demosaic() stay in bounds.
    ivec2 xyClamped = clamp(xy, ivec2(1), ivec2(rawWidth, rawHeight) - 2);
    intermediate = convertSensorToIntermediate(xy, demosaic(xyClamped));
}
160 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage1_4_edge_mirror_fs.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D intermediateBuffer;
6 | uniform ivec2 minxy;
7 | uniform ivec2 maxxy;
8 |
9 | // Out
10 | out vec3 intermediate;
11 |
// Reflects a single coordinate back into [lo, hi] if it lies outside.
int mirrorAxis(int v, int lo, int hi) {
    if (v < lo) {
        return 2 * lo - v;
    }
    if (v > hi) {
        return 2 * hi - v;
    }
    return v;
}

// Mirrors out-of-bounds coordinates across the nearest image edge.
ivec2 mirrorOOBCoords(ivec2 coords) {
    return ivec2(
        mirrorAxis(coords.x, minxy.x, maxxy.x),
        mirrorAxis(coords.y, minxy.y, maxxy.y)
    );
}
25 |
void main() {
    ivec2 xy = ivec2(gl_FragCoord.xy);
    ivec2 src = mirrorOOBCoords(xy);
    intermediate = texelFetch(intermediateBuffer, src, 0).xyz;
}
30 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage2_0_blur_3ch_fs.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D buf;
6 | uniform ivec2 bufSize;
7 |
8 | uniform float sigma;
9 | uniform int radius;
10 | uniform ivec2 dir;
11 |
12 | out vec3 result;
13 |
14 | #include gaussian
15 |
void main() {
    ivec2 center = ivec2(gl_FragCoord.xy);

    vec3 acc = vec3(0.f);
    float weightSum = 0.f;

    // Separable 1D Gaussian pass along 'dir'; out-of-bounds taps are skipped.
    for (int i = -radius; i <= radius; i++) {
        ivec2 xy = center + i * dir;
        bool inside = xy.x >= 0 && xy.y >= 0 && xy.x < bufSize.x && xy.y < bufSize.y;
        if (inside) {
            float w = unscaledGaussian(float(i), sigma);
            acc += texelFetch(buf, xy, 0).xyz * w;
            weightSum += w;
        }
    }

    result = acc / weightSum;
}
36 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage2_1_bilateral_ch.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | // Bilateral filter
4 | precision mediump float;
5 |
6 | // Use buf to blur luma while keeping chroma.
7 | uniform sampler2D buf;
8 | uniform ivec2 bufSize;
9 |
10 | uniform vec2 sigma;
11 | uniform ivec2 radius;
12 |
13 | // Out
14 | out float result;
15 |
16 | #include gaussian
17 |
// Range kernel: weight falls off with intensity difference.
float fr(float diffi) {
    float s = sigma.x;
    return unscaledGaussian(diffi, s);
}
22 |
// Spatial kernel: weight falls off with distance from the centre tap.
float gs(float diffx) {
    float s = sigma.y;
    return unscaledGaussian(diffx, s);
}
28 |
// Bilateral filter on a single-channel buffer: smooths while preserving
// edges by down-weighting taps whose intensity differs from the centre.
void main() {
    ivec2 xyCenter = ivec2(gl_FragCoord.xy);

    float valCenter = texelFetch(buf, xyCenter, 0).x;

    // Window of +/- radius.x, clamped to the buffer.
    ivec2 minxy = max(ivec2(0, 0), xyCenter - radius.x);
    ivec2 maxxy = min(bufSize - 1, xyCenter + radius.x);

    float I = 0.f;
    float W = 0.f;

    // radius.y is the sampling stride for both axes (sparse window).
    for (int y = minxy.y; y <= maxxy.y; y += radius.y) {
        for (int x = minxy.x; x <= maxxy.x; x += radius.y) {
            ivec2 xyPixel = ivec2(x, y);

            float valPixel = texelFetch(buf, xyPixel, 0).x;

            vec2 dxy = vec2(xyPixel - xyCenter);

            // Combined range (fr) and spatial (gs) weight.
            float scale = fr(abs(valPixel - valCenter)) * gs(length(dxy));
            I += valPixel * scale;
            W += scale;
        }
    }

    // Guard against a vanishing weight sum.
    if (W < 0.0001f) {
        result = valCenter;
    } else {
        result = I / W;
    }
}
60 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage2_1_noise_level_blur_fs.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D buf;
6 | uniform ivec2 minxy;
7 | uniform ivec2 maxxy;
8 |
9 | uniform float sigma;
10 | uniform ivec2 radius;
11 |
12 | uniform ivec2 dir;
13 |
14 | // Out
15 | out vec3 result;
16 |
17 | #include gaussian
18 |
void main() {
    ivec2 center = ivec2(gl_FragCoord.xy);

    vec3 acc = vec3(0.f);
    float weightSum = 0.f;

    // Sparse 1D Gaussian along 'dir': radius.x is the extent,
    // radius.y the sampling stride. Taps outside [minxy, maxxy] skipped.
    for (int i = -radius.x; i <= radius.x; i += radius.y) {
        ivec2 xy = center + i * dir;
        if (xy.x >= minxy.x && xy.y >= minxy.y && xy.x <= maxxy.x && xy.y <= maxxy.y) {
            float w = unscaledGaussian(float(i), sigma);
            acc += texelFetch(buf, xy, 0).xyz * w;
            weightSum += w;
        }
    }

    result = acc / weightSum;
}
37 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage2_1_noise_level_fs.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | #define WEIGHTS vec3(vec2(96.f), 3.f)
4 |
5 | precision mediump float;
6 |
7 | uniform sampler2D intermediate;
8 | uniform ivec2 bufSize;
9 |
10 | uniform int radius;
11 |
12 | // Out
13 | out vec3 result;
14 |
15 | #include load3x3v3
16 |
// Estimates per-channel noise for each 4x4 block of the intermediate
// buffer by comparing directional gradients around the block centre.
// Fix: removed the unused local `ivec2 xyPixel` declaration.
void main() {
    // Each output texel summarises a 4x4 block of the input.
    ivec2 xyCenter = ivec2(gl_FragCoord.xy) * 4;

    ivec2 minxy = xyCenter - 1;
    ivec2 maxxy = xyCenter + 1;

    // Load the 3x3 neighbourhood, clamping reads to the buffer bounds.
    vec3[9] impatch;
    int i = 0;
    for (int y = minxy.y; y <= maxxy.y; y += 1) {
        for (int x = minxy.x; x <= maxxy.x; x += 1) {
            impatch[i++] = texelFetch(intermediate, clamp(ivec2(x, y), ivec2(0), bufSize - 1), 0).xyz;
        }
    }

    // Directional gradients around the centre pixel (impatch[4]).
    vec3 gradientHor = abs(impatch[4] - impatch[3]) + abs(impatch[4] - impatch[5]);
    vec3 gradientVert = abs(impatch[4] - impatch[1]) + abs(impatch[4] - impatch[7]);
    vec3 gradientNE = abs(impatch[4] - impatch[2]) + abs(impatch[4] - impatch[6]);
    vec3 gradientNW = abs(impatch[4] - impatch[0]) + abs(impatch[4] - impatch[8]);

    vec3 gradientMax = max(max(gradientHor, gradientVert), max(gradientNE, gradientNW));
    vec3 gradientMin = min(min(gradientHor, gradientVert), min(gradientNE, gradientNW));

    // Noise varies in every direction (gradientMin high); a real edge has one
    // dominant direction, making gradientMax large and suppressing the term.
    result = WEIGHTS * max(3.f * gradientMin - gradientMax, 0.f);
}
42 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage2_2_analysis_fs.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D intermediate;
6 | uniform ivec2 outOffset;
7 | uniform int samplingFactor;
8 |
9 | // Out
10 | out vec4 analysis;
11 |
12 | #include load3x3v3
13 |
// Computes per-channel standard deviation over a sparse 3x3 patch,
// packed with the centre luminance, for downstream analysis.
// Fix: `mean` and `sigma` were accumulated without initialization —
// GLSL leaves locals undefined, so the result was undefined on
// drivers that do not happen to zero-fill registers.
void main() {
    ivec2 xy = samplingFactor * ivec2(gl_FragCoord.xy) + outOffset;

    // Load patch (3x3, stride 2)
    vec3[9] impatch = load3x3(xy, 2, intermediate);

    /**
     * STANDARD DEVIATIONS
     */
    vec3 mean = vec3(0.f);
    vec3 sigma = vec3(0.f);
    for (int i = 0; i < 9; i++) {
        mean += impatch[i];
    }
    mean /= 9.f;
    for (int i = 0; i < 9; i++) {
        vec3 diff = mean - impatch[i];
        sigma += diff * diff;
    }

    // Pack the per-channel stddev with the centre luminance.
    float z = texelFetch(intermediate, xy, 0).z;
    analysis = vec4(sqrt(sigma / 9.f), z);
}
36 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage2_3_bilateral.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | // Bilateral filter
4 | precision mediump float;
5 |
6 | // Use buf to blur luma while keeping chroma.
7 | uniform sampler2D buf;
8 | uniform ivec2 bufSize;
9 |
10 | uniform vec2 sigma;
11 | uniform ivec2 radius;
12 |
13 | // Out
14 | out vec3 result;
15 |
16 | #include gaussian
17 |
// Range kernel: weight falls off with pixel difference.
float fr(float diffi) {
    float s = sigma.x;
    return unscaledGaussian(diffi, s);
}
22 |
// Spatial kernel: weight falls off with distance from the centre tap.
float gs(float diffx) {
    float s = sigma.y;
    return unscaledGaussian(diffx, s);
}
28 |
// Converts xyY (chromaticity + luminance) to XYZ tristimulus values.
// Degenerate chromaticity (y <= 0) maps to (0, Y, 0).
vec3 xyYtoXYZ(vec3 xyY) {
    float x = xyY.x;
    float y = xyY.y;
    float Y = xyY.z;
    vec3 XYZ = vec3(0.f, Y, 0.f);
    if (y > 0.f) {
        XYZ.x = x * Y / y;
        XYZ.z = (1.f - x - y) * Y / y;
    }
    return XYZ;
}
37 |
// Difference between two xyY pixels, measured as Euclidean distance
// after converting both back to XYZ.
float pixDiff(vec3 pix1, vec3 pix2) {
    vec3 a = xyYtoXYZ(pix1);
    vec3 b = xyYtoXYZ(pix2);
    return distance(a, b);
}
41 |
42 | /*
43 | float pixDiff(vec3 pix1, vec3 pix2, float noise) {
44 | // pix1 is input/output pixel position.
45 | float z = 8.f * mix(pix1.z, min(pix1.z, pix2.z), 0.25f);
46 | z *= max(0.f, 1.f - 5.f * noise);
47 | return length((pix2 - pix1) * vec3(z, z, 1.f));
48 | }
49 | */
50 |
// Bilateral filter over the xyY buffer; the range weight uses the
// distance between pixels converted back to XYZ (see pixDiff).
void main() {
    ivec2 xyCenter = ivec2(gl_FragCoord.xy);

    vec3 XYZCenter = texelFetch(buf, xyCenter, 0).xyz;

    // Window of +/- radius.x, clamped to the buffer.
    ivec2 minxy = max(ivec2(0, 0), xyCenter - radius.x);
    ivec2 maxxy = min(bufSize - 1, xyCenter + radius.x);

    vec3 I = vec3(0.f);
    float W = 0.f;

    // radius.y is the sampling stride for both axes (sparse window).
    for (int y = minxy.y; y <= maxxy.y; y += radius.y) {
        for (int x = minxy.x; x <= maxxy.x; x += radius.y) {
            ivec2 xyPixel = ivec2(x, y);

            vec3 XYZPixel = texelFetch(buf, xyPixel, 0).xyz;

            vec2 dxy = vec2(xyPixel - xyCenter);

            // Combined range (fr) and spatial (gs) weight.
            float scale = fr(pixDiff(XYZCenter, XYZPixel)) * gs(length(dxy));
            I += XYZPixel * scale;
            W += scale;
        }
    }

    // Fall back to the centre value if every tap was rejected.
    if (W < 0.0001f) {
        result = XYZCenter;
    } else {
        result = I / W;
    }
}
82 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage2_3_median.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D buf;
6 |
7 | // Out
8 | out vec3 filtered;
9 |
// 3x3 median filter on the z (luma) channel via insertion sort;
// chroma (xy) passes through untouched.
// Fix: the shift was written as `unfiltered[j] = unfiltered[--j]`,
// which reads and writes `j` in one statement — the order in which
// the LHS index and the RHS side effect are evaluated is unspecified
// in GLSL, so the sort could corrupt the array on some compilers.
void main() {
    ivec2 xy = ivec2(gl_FragCoord.xy);
    float unfiltered[9];
    float tmp;
    int j;

    for (int i = 0; i < 9; i++) {
        tmp = texelFetch(buf, xy + ivec2((i % 3) - 1, (i / 3) - 1), 0).z;
        j = i;
        // Shift larger values forward, starting from the right.
        while (j > 0 && tmp < unfiltered[j - 1]) {
            unfiltered[j] = unfiltered[j - 1];
            j--;
        }
        unfiltered[j] = tmp;
    }

    filtered.xy = texelFetch(buf, xy, 0).xy;
    filtered.z = unfiltered[4]; // median of the 9 sorted values
}
29 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage2_4_merge_detail.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | //uniform sampler2D bilateral;
6 | uniform sampler2D intermediate;
7 |
8 | uniform sampler2D hist;
9 | uniform vec2 histOffset;
10 | uniform float histFactor;
11 | uniform float gamma;
12 |
13 | uniform sampler2D noiseTex;
14 |
15 | // Out
16 | out vec3 processed;
17 |
// Applies histogram-equalisation-based tone adjustment and gamma to the
// luminance (z) channel, passing chromaticity (xy) through untouched.
void main() {
    ivec2 xy = ivec2(gl_FragCoord.xy);

    vec3 intermediateValXyz = texelFetch(intermediate, xy, 0).xyz;
    //vec3 bilateralValXyz = texelFetch(bilateral, xy, 0).xyz;

    float intermediateVal = intermediateValXyz.z;
    //float bilateralVal = bilateralValXyz.z;

    float z = intermediateVal;
    // Skip near-black pixels to avoid dividing by ~zero below.
    if (intermediateVal > 0.0001f) {
        // (Original Reflectance * Original Luminosity)
        // * (Corrected Luminosity / Original Luminosity)

        // Look up the equalised luminance in the 1D histogram texture.
        float texCoord = histOffset.x + histOffset.y * intermediateVal;
        float correctLuminanceHistEq = texture(hist, vec2(texCoord, 0.5f)).x;

        // histFactor controls how strongly the equalisation is applied.
        z *= pow(correctLuminanceHistEq / intermediateVal, histFactor);
        z = pow(z, gamma);
    }

    processed.xy = intermediateValXyz.xy;
    processed.z = clamp(z, 0.f, 1.f);
}
42 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage3_1_noise_reduce_chroma_fs.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | #define PI 3.1415926535897932384626433832795f
4 | #define hPI 1.57079632679489661923f
5 | #define qPI 0.785398163397448309616f
6 |
7 | precision mediump float;
8 |
9 | uniform sampler2D inBuffer;
10 | uniform ivec2 inBufferSize;
11 | uniform sampler2D noiseTex;
12 |
13 | out vec3 result;
14 |
// Loads a 3x3 patch around xy with tap stride n, clamping reads to the
// buffer. Fix: the upper clamp bound was `inBufferSize`, which is one
// past the last valid texel — texelFetch at that coordinate is out of
// bounds (undefined results) along the bottom/right edges. Clamp to
// inBufferSize - 1, matching the `bufSize - 1` pattern used elsewhere.
vec3[9] load3x3(ivec2 xy, int n, sampler2D buf) {
    vec3 outputArray[9];
    ivec2 xyPx;
    for (int i = 0; i < 9; i++) {
        xyPx = xy + n * ivec2((i % 3) - 1, (i / 3) - 1);
        xyPx = clamp(xyPx, ivec2(0), inBufferSize - 1);
        outputArray[i] = texelFetch(buf, xyPx, 0).xyz;
    }
    return outputArray;
}
25 |
// Chroma noise reduction: averages the xy (chromaticity) of neighbours
// whose full xyY difference from the centre stays under a noise-derived
// threshold; luminance (z) is left untouched.
void main() {
    ivec2 xyPos = ivec2(gl_FragCoord.xy);

    // Noise texture is at half resolution.
    vec3 noiseLevel = texelFetch(noiseTex, xyPos / 2, 0).xyz; // Sigma
    // Accept neighbours within two sigma per channel.
    vec3 maxDiff = noiseLevel * 2.f;

    vec3[9] impatch = load3x3(xyPos, 2, inBuffer);
    vec3 mid = impatch[4];

    vec2 sum = mid.xy;
    int weight = 1;

    vec3 val, diff;
    for (int i = 0; i < 9; i++) {
        if (i != 4) {
            val = impatch[i];
            diff = abs(val - mid);
            if (diff.x < maxDiff.x && diff.y < maxDiff.y && diff.z < maxDiff.z) {
                sum += val.xy;
                weight += 1;
            }
        }
    }

    // Weight is never zero as mid is always included.
    result.xy = sum / float(weight);
    result.z = mid.z;
}
54 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage3_1_noise_reduce_fs.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D inBuffer;
6 | uniform sampler2D noiseTex;
7 | uniform ivec2 bufSize;
8 |
9 | uniform ivec2 radius;
10 | uniform vec2 sigma;
11 | uniform float blendY;
12 |
13 | out vec3 result;
14 |
15 | #include gaussian
16 |
// Range kernel, scaled per channel by the local noise estimate s:
// noisier channels tolerate larger differences.
vec3 fr(vec3 diffi, vec3 s) {
    vec3 spread = sigma.y * s;
    return unscaledGaussian(abs(diffi), spread);
}
21 |
// Spatial kernel over the integer pixel offset.
float gs(ivec2 diffxy) {
    vec2 d = vec2(diffxy);
    return unscaledGaussian(length(d), sigma.x);
}
26 |
// Noise-adaptive bilateral filter in xyY: the range kernel is scaled by
// a per-pixel noise estimate so flat noisy areas blur more than edges.
void main() {
    ivec2 xyCenter = ivec2(gl_FragCoord.xy);

    vec3 XYZCenter = texelFetch(inBuffer, xyCenter, 0).xyz;
    // Noise texture is at quarter resolution.
    vec3 noiseLevel = texelFetch(noiseTex, xyCenter / 4, 0).xyz;

    // Window of +/- radius.x, clamped to the buffer.
    ivec2 minxy = max(ivec2(0, 0), xyCenter - radius.x);
    ivec2 maxxy = min(bufSize - 1, xyCenter + radius.x);

    vec3 I = vec3(0.f);
    float W = 0.f;

    ivec2 xyPixel;
    vec3 XYZPixel, XYZScale;
    float XYZScalef;
    // radius.y is the sampling stride for both axes (sparse window).
    for (int y = minxy.y; y <= maxxy.y; y += radius.y) {
        for (int x = minxy.x; x <= maxxy.x; x += radius.y) {
            xyPixel = ivec2(x, y);
            XYZPixel = texelFetch(inBuffer, xyPixel, 0).xyz;

            // Per-channel weight collapsed to a scalar via its length.
            XYZScale = fr(XYZPixel - XYZCenter, noiseLevel) * gs(xyPixel - xyCenter);
            XYZScalef = length(XYZScale);
            I += XYZPixel * XYZScalef;
            W += XYZScalef;
        }
    }

    vec3 tmp;
    if (W < 0.0001f) {
        tmp = XYZCenter;
    } else {
        tmp = I / W;
        // Blend some unfiltered luma back in to retain fine detail.
        tmp.z = mix(tmp.z, XYZCenter.z, blendY);
    }

    // Desaturate noisy patches.
    // NOTE(review): for low noise levels the mix factor goes negative,
    // which extrapolates *away* from the neutral xy — confirm intended.
    tmp.xy = mix(tmp.xy, vec2(0.345703f, 0.358539f), min(0.01f * length(noiseLevel.xy) - 0.01f, 0.25f));
    result = tmp;
}
66 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage3_1_noise_reduce_median_fs.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | #define PI 3.1415926535897932384626433832795f
4 | #define hPI 1.57079632679489661923f
5 | #define qPI 0.785398163397448309616f
6 |
7 | precision mediump float;
8 |
9 | uniform sampler2D inBuffer;
10 |
11 | uniform int radiusDenoise;
12 |
13 | out vec3 result;
14 |
15 | #include load3x3v3
16 |
void main() {
    ivec2 xyPos = ivec2(gl_FragCoord.xy);

    vec3[9] patch = load3x3(xyPos, radiusDenoise, inBuffer);

    // Approximate 3x3 median per channel with a median-of-medians network:
    // the median of each row, then the median of the three row medians.
    // (median of three values = sum - min - max)
    vec3 rowMedian[3];
    for (int r = 0; r < 3; r++) {
        vec3 a = patch[3 * r];
        vec3 b = patch[3 * r + 1];
        vec3 c = patch[3 * r + 2];
        vec3 rowMin = min(min(a, b), c);
        vec3 rowMax = max(max(a, b), c);
        rowMedian[r] = a + b + c - rowMin - rowMax;
    }

    vec3 vertMin = min(min(rowMedian[0], rowMedian[1]), rowMedian[2]);
    vec3 vertMax = max(max(rowMedian[0], rowMedian[1]), rowMedian[2]);
    result = rowMedian[0] + rowMedian[1] + rowMedian[2] - vertMin - vertMax;
}
126 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage3_1_noise_reduce_remove_noise_fs.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D bufDenoisedHighRes;
6 | uniform sampler2D bufDenoisedMediumRes;
7 | uniform sampler2D bufDenoisedLowRes;
8 | uniform sampler2D bufNoisyMediumRes;
9 | uniform sampler2D bufNoisyLowRes;
10 | uniform sampler2D noiseTexMediumRes;
11 | uniform sampler2D noiseTexLowRes;
12 |
13 | out vec3 result;
14 |
15 | #include xyytoxyz
16 |
void main() {
    ivec2 xy = ivec2(gl_FragCoord.xy);

    // Full-resolution denoised base.
    vec3 highRes = texelFetch(bufDenoisedHighRes, xy, 0).xyz;

    // Band-pass residuals: denoised minus noisy at each coarser scale.
    vec3 mediumRes = texelFetch(bufDenoisedMediumRes, xy, 0).xyz
            - texelFetch(bufNoisyMediumRes, xy, 0).xyz;
    vec3 lowRes = texelFetch(bufDenoisedLowRes, xy, 0).xyz
            - texelFetch(bufNoisyLowRes, xy, 0).xyz;

    // Attenuate each residual where the (quarter-res) noise estimate is low.
    mediumRes *= min(texelFetch(noiseTexMediumRes, xy / 4, 0).xyz * 32.f, 1.f);
    lowRes *= min(texelFetch(noiseTexLowRes, xy / 4, 0).xyz * 64.f, 1.f);

    result = xyYtoXYZ(highRes + mediumRes + lowRes);
}
34 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage3_2_blur_fs.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D buf;
6 | uniform ivec2 minxy;
7 | uniform ivec2 maxxy;
8 |
9 | uniform float sigma;
10 | uniform ivec2 radius;
11 |
12 | uniform ivec2 dir;
13 | uniform vec2 ch;
14 |
15 | // Out
16 | out float result;
17 |
18 | #include gaussian
19 |
void main() {
    ivec2 center = ivec2(gl_FragCoord.xy);

    float acc = 0.f;
    float weightSum = 0.f;

    // Sparse 1D Gaussian along 'dir' (extent radius.x, stride radius.y);
    // 'ch' selects/mixes the x and z channels of the source buffer.
    for (int i = -radius.x; i <= radius.x; i += radius.y) {
        ivec2 xy = center + i * dir;
        if (xy.x >= minxy.x && xy.y >= minxy.y && xy.x <= maxxy.x && xy.y <= maxxy.y) {
            float z = dot(ch, texelFetch(buf, xy, 0).xz);
            float w = unscaledGaussian(float(i), sigma);
            acc += z * w;
            weightSum += w;
        }
    }

    result = acc / weightSum;
}
38 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage4_0_blur_1ch_fs.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D buf;
6 | uniform ivec2 bufSize;
7 |
8 | uniform float sigma;
9 | uniform int radius;
10 | uniform ivec2 dir;
11 |
12 | out float result;
13 |
14 | #include gaussian
15 |
// Passthrough shader. The Gaussian blur this stage originally computed
// was disabled by overwriting `result` with the unfiltered centre value,
// leaving the whole accumulation loop as dead work; the loop is removed
// so no GPU time is wasted. Output is unchanged.
// To re-enable the blur: accumulate texelFetch(buf, xyCenter + i * dir, 0).x
// weighted by unscaledGaussian(float(i), sigma) over i in [-radius, radius]
// (skipping taps outside bufSize) and write result = I / W.
void main() {
    ivec2 xyCenter = ivec2(gl_FragCoord.xy);
    result = texelFetch(buf, xyCenter, 0).x;
}
36 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage4_1_doubleexpose.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D buf;
6 |
7 | uniform float factor;
8 |
9 | out float result;
10 |
11 | #include gamma
12 |
void main() {
    ivec2 xy = ivec2(gl_FragCoord.xy);
    // Scale luma by the exposure factor, clamp to [0, 1], then gamma-encode.
    float exposed = clamp(factor * texelFetch(buf, xy, 0).z, 0.f, 1.f);
    result = gammaEncode(exposed);
}
18 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage4_2_downsample.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D buf;
6 | uniform ivec2 maxxy;
7 |
8 | out float result;
9 |
void main() {
    ivec2 xy = ivec2(gl_FragCoord.xy);
    // Nearest-neighbour 2x downsample; clamp so the last row/column
    // never reads past maxxy.
    ivec2 src = min(2 * xy, maxxy);
    result = texelFetch(buf, src, 0).x;
}
14 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage4_3_upsample.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D buf;
6 |
7 | out float result;
8 |
// 2x bilinear upsample: each output pixel interpolates between the
// source texel and its right/bottom neighbours depending on parity.
void main() {
    ivec2 xyCenter = ivec2(gl_FragCoord.xy);
    ivec2 xyDownscaled = xyCenter / 2;
    // Parity selects which of the four positions within the upscaled
    // 2x2 cell this fragment occupies.
    ivec2 xyAlign = xyCenter % 2;

    // We always upsample from a texture that is larger or the proper size,
    // so do not worry about clamping coordinates.
    float topLeft = texelFetch(buf, xyDownscaled, 0).x;
    // Only fetched when the corresponding parity bit is set; each switch
    // case below reads only the values its parity guarantees were assigned.
    float topRight, bottomLeft;
    if (xyAlign.x == 1) {
        topRight = texelFetch(buf, xyDownscaled + ivec2(1, 0), 0).x;
    }
    if (xyAlign.y == 1) {
        bottomLeft = texelFetch(buf, xyDownscaled + ivec2(0, 1), 0).x;
    }

    // Linear interpolation over 2x upscaling is the same as bicubic or cosine interpolation,
    // as all are the same: x=0 -> y=0, x=0.5 -> y=0.5, x=1 -> y=1.
    // Therefore this should not introduce artifacts.
    int pxFour = 2 * xyAlign.y + xyAlign.x;
    switch (pxFour) {
        case 0: // TL
            result = topLeft;
            break;
        case 1: // TR
            result = (topLeft + topRight) * 0.5f;
            break;
        case 2: // BL
            result = (topLeft + bottomLeft) * 0.5f;
            break;
        case 3: // BR
            float bottomRight = texelFetch(buf, xyDownscaled + ivec2(1, 1), 0).x;
            result = (topLeft + topRight + bottomLeft + bottomRight) * 0.25f;
            break;
    }
}
45 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage4_4_difference.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D target;
6 | uniform sampler2D base;
7 |
8 | out float result;
9 |
void main() {
    ivec2 xy = ivec2(gl_FragCoord.xy);
    // Laplacian-pyramid style residual: target minus base.
    float targetVal = texelFetch(target, xy, 0).x;
    float baseVal = texelFetch(base, xy, 0).x;
    result = targetVal - baseVal;
}
14 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage4_5_merge.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | #define TARGET_Z 0.6f
4 | #define GAUSS_Z 0.5f
5 |
6 | precision mediump float;
7 |
8 | uniform bool useUpscaled;
9 | uniform sampler2D upscaled;
10 |
11 | // Weighting is done using these.
12 | uniform sampler2D gaussUnder;
13 | uniform sampler2D gaussOver;
14 |
15 | // Blending is done using these.
16 | uniform sampler2D blendUnder;
17 | uniform sampler2D blendOver;
18 |
19 | uniform int level;
20 |
21 | out float result;
22 |
23 | #include gaussian
24 | #include sigmoid
25 | #include gamma
26 |
// From the hdr-plus repo: exposure weight measuring how close a value
// sits to the target level TARGET_Z.
float dist(float z) {
    float dz = z - TARGET_Z;
    return unscaledGaussian(dz, GAUSS_Z);
}
31 |
// Merges one pyramid level of the under/over exposed images, weighting
// each by how close its Gaussian-blurred value is to the target exposure.
void main() {
    ivec2 xyCenter = ivec2(gl_FragCoord.xy);

    // If this is the lowest layer, start with zero.
    float base = useUpscaled
        ? texelFetch(upscaled, xyCenter, 0).x
        : 0.f;

    // How are we going to blend these two?
    float blendUnderVal = texelFetch(blendUnder, xyCenter, 0).x;
    float blendOverVal = texelFetch(blendOver, xyCenter, 0).x;

    // Look at result to compute weights.
    float gaussUnderVal = texelFetch(gaussUnder, xyCenter, 0).x;
    float gaussOverVal = texelFetch(gaussOver, xyCenter, 0).x;

    float gaussUnderValDev = dist(gaussUnderVal);
    float gaussOverValDev = dist(gaussOverVal);

    float blend = gaussOverValDev / (gaussUnderValDev + gaussOverValDev); // [0, 1]
    float blendVal = mix(blendUnderVal, blendOverVal, blend);

    // Boost detail more at fine pyramid levels (small 'level').
    blendVal *= max(1.f, 1.5f - 0.05f * float(level));

    float res = base + blendVal;
    // The finest level undoes the gamma applied before pyramid building.
    if (level == 0) {
        res = gammaDecode(res);
    }
    result = res;
}
62 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage4_6_xyz_to_xyy.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D buf;
6 |
7 | out vec3 result;
8 |
9 | #include xyztoxyy
10 |
void main() {
    // Convert this fragment's XYZ texel to xyY (chromaticity + luminance).
    vec3 xyz = texelFetch(buf, ivec2(gl_FragCoord.xy), 0).xyz;
    result = XYZtoxyY(xyz);
}
15 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage4_7_nr_intermediate.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D buf;
6 | uniform ivec2 bufEdge;
7 | uniform float blendY;
8 | uniform vec2 sigma;
9 |
10 | out vec3 result;
11 |
12 | #include gaussian
13 |
14 | // Difference
vec3 fr(vec3 diffi) {
    // Range kernel: per-channel Gaussian falloff over the value difference.
    vec3 magnitude = abs(diffi);
    return unscaledGaussian(magnitude, sigma.y);
}
18 |
19 | // Distance
float gs(ivec2 diffxy) {
    // Spatial kernel: Gaussian falloff over the Euclidean pixel distance.
    float spatialDist = length(vec2(diffxy));
    return unscaledGaussian(spatialDist, sigma.x);
}
23 |
vec3[9] load3x3(ivec2 xy) {
    // Gather a sparse 3x3 neighborhood (stride 2), clamped at the buffer edge.
    vec3 neighborhood[9];
    int i = 0;
    for (int dy = -1; dy <= 1; dy++) {
        for (int dx = -1; dx <= 1; dx++) {
            ivec2 pos = clamp(xy + 2 * ivec2(dx, dy), ivec2(0), bufEdge);
            neighborhood[i++] = texelFetch(buf, pos, 0).xyz;
        }
    }
    return neighborhood;
}
34 |
void main() {
    ivec2 xy = ivec2(gl_FragCoord.xy);

    vec3[9] impatch = load3x3(xy);
    vec3 center = impatch[4];

    // Bilateral accumulation over the (sparse) neighborhood.
    vec3 sum = vec3(0.f);
    float totalWeight = 0.f;
    for (int i = 0; i < 9; i++) {
        ivec2 offset = ivec2((i % 3) - 1, (i / 3) - 1);
        vec3 px = impatch[i];
        // Combine range and spatial kernels, collapsed to a scalar weight.
        float weight = length(fr(px - center) * gs(offset));
        sum += px * weight;
        totalWeight += weight;
    }

    vec3 filtered;
    if (totalWeight < 0.0001f) {
        // Degenerate weights: fall back to the unfiltered center pixel.
        filtered = center;
    } else {
        filtered = sum / totalWeight;
        // Blend part of the original luma back in to limit oversmoothing.
        filtered.z = mix(filtered.z, center.z, blendY);
    }

    result = filtered;
}
66 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage4_8_nr_zero.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D buf;
6 | uniform ivec2 bufEdge;
7 |
8 | uniform float blendY;
9 |
10 | out vec3 result;
11 |
vec3[9] load3x3(ivec2 xy) {
    // Gather the immediate 3x3 neighborhood, clamped at the buffer edge.
    vec3 neighborhood[9];
    int i = 0;
    for (int dy = -1; dy <= 1; dy++) {
        for (int dx = -1; dx <= 1; dx++) {
            ivec2 pos = clamp(xy + ivec2(dx, dy), ivec2(0), bufEdge);
            neighborhood[i++] = texelFetch(buf, pos, 0).xyz;
        }
    }
    return neighborhood;
}
22 |
// Component-wise median of three vectors: sum minus min minus max.
vec3 median3(vec3 a, vec3 b, vec3 c) {
    return a + b + c - min(min(a, b), c) - max(max(a, b), c);
}

void main() {
    ivec2 xy = ivec2(gl_FragCoord.xy);

    vec3[9] impatch = load3x3(xy);

    // Approximate 3x3 median: the median of the three row medians.
    vec3 rowTop = median3(impatch[0], impatch[1], impatch[2]);
    vec3 rowMid = median3(impatch[3], impatch[4], impatch[5]);
    vec3 rowBot = median3(impatch[6], impatch[7], impatch[8]);
    vec3 med = median3(rowTop, rowMid, rowBot);

    // Chroma takes the median fully; luma blends back towards the original.
    result.xy = med.xy;
    result.z = mix(med.z, impatch[4].z, blendY);
}
47 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/stage4_9_combine_z.glsl:
--------------------------------------------------------------------------------
1 | #version 300 es
2 |
3 | precision mediump float;
4 |
5 | uniform sampler2D bufChroma;
6 | uniform sampler2D bufLuma;
7 |
8 | out vec3 result;
9 |
void main() {
    ivec2 xy = ivec2(gl_FragCoord.xy);

    // Recombine: chroma (xy) from one buffer, denoised luma (z) from the other.
    vec3 chroma = texelFetch(bufChroma, xy, 0).xyz;
    float luma = texelFetch(bufLuma, xy, 0).x;
    result = vec3(chroma.xy, luma);
}
16 |
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #fff5f5f5
4 | #ffe0e0e0
5 | #ff4285f4
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/config.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | - @string/post_processing_disabled
5 | - @string/post_processing_natural
6 | - @string/post_processing_boosted
7 |
8 |
9 |
10 | - Disabled
11 | - Natural
12 | - Boosted
13 |
14 |
15 | Natural
16 |
17 | true
18 | false
19 | false
20 | DCIM/Processed
21 |
22 | - 1
23 | - 0.975
24 |
25 | true
26 | true
27 | true
28 | true
29 | true
30 | true
31 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | DNG Processor
3 |
4 | Features
5 | Select manually
6 | Pick files to process
7 | Reprocess
8 | The last processed image
9 |
10 | Files
11 | Background processing
12 | Automatically detect new files
13 | Delete original
14 | Remove files that were successfully processed
15 | Add suffix
16 | Append DNGP to the new file name
17 | Save directory
18 |
19 | Saturation map
20 | Tweak intensity per hue
21 | Red - Skin tones
22 | Yellow
23 | Green - Grass
24 | Cyan - Foliage
25 | Blue - Water
26 | Indigo - Skies
27 | Violet
28 | Magenta
29 | Soft limit
30 | Reset
31 | Restore to default values
32 |
33 | Pipeline
34 | Post processing mode
35 | Disabled
36 | Natural
37 | Boosted
38 | Noise reduction
39 | Removes chromatic irregularities
40 | Exposure fusion
41 | Compresses dynamic range
42 | Local contrast enhancement
43 | Sharpens details for a crunchier look
44 | Adaptive histogram equalization
45 | Improves visibility in high dynamic range scenes
46 | Forward matrix
47 | Device specific tuning
48 | Gain map
49 | Improves dynamic range
50 |
51 | Other
52 | Donate
53 | Support my work
54 | https://play.google.com/store/apps/details?id=amirz.donation
55 |
56 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings_prefs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | pref_reprocess
4 | pref_manual_select
5 |
6 | pref_background_process
7 | pref_delete_original
8 | pref_suffix
9 | pref_save_path
10 |
11 | pref_saturation_mult
12 | pref_saturation_r
13 | pref_saturation_y
14 | pref_saturation_g
15 | pref_saturation_c
16 | pref_saturation_b
17 | pref_saturation_i
18 | pref_saturation_v
19 | pref_saturation_m
20 | pref_saturation_limit
21 | pref_saturation_reset
22 |
23 | pref_post_process
24 | pref_noise_reduce
25 | pref_expose_fuse
26 | pref_lce
27 | pref_ahe
28 | pref_forward_matrix
29 | pref_gain_map
30 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
10 |
11 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/app/src/test/java/amirz/dngprocessor/ExampleUnitTest.java:
--------------------------------------------------------------------------------
1 | package amirz.dngprocessor;
2 |
3 | import org.junit.Test;
4 |
5 | import static org.junit.Assert.*;
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * @see Testing documentation
11 | */
12 | public class ExampleUnitTest {
13 | @Test
14 | public void addition_isCorrect() {
15 | assertEquals(4, 2 + 2);
16 | }
17 | }
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {

    // Repositories used to resolve build-time plugins (not app dependencies).
    repositories {
        google()
        // NOTE(review): JCenter has been sunset (read-only); consider mavenCentral() — verify all artifacts still resolve.
        jcenter()
    }
    dependencies {
        // Android Gradle Plugin used to build the :app module.
        classpath 'com.android.tools.build:gradle:4.0.1'


        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

// Dependency repositories shared by all modules in this build.
allprojects {
    repositories {
        google()
        jcenter()
    }
}

// `gradlew clean` deletes the root build directory.
task clean(type: Delete) {
    delete rootProject.buildDir
}
28 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx1536m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/amirzaidi/DNGProcessor/29a188be16b1e7ba48fd953a73f0f3a03c681a77/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Sat Aug 08 23:46:27 CEST 2020
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

##############################################################################
##
##  Gradle start up script for UN*X
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

# Print a warning without aborting the script.
warn () {
    echo "$*"
}

# Print an error message and abort with exit status 1.
die () {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac

# The wrapper jar bootstraps the Gradle distribution configured in
# gradle/wrapper/gradle-wrapper.properties.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Escape application args
# Emits each argument single-quoted (embedded quotes escaped) so the list
# survives the later `eval set --` word splitting intact.
save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
APP_ARGS=$(save "$@")

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
  cd "$(dirname "$0")"
fi

exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
@rem _SKIP is not referenced anywhere in this script; retained from the
@rem Win9x-era argument handling.
set _SKIP=2

:win9xME_args_slurp
@rem NOTE(review): %* is captured in a single step and nothing jumps back to
@rem this label, so it never loops — matches the stock wrapper of this era.
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
// Modules that make up this Gradle build.
include ':app'
2 |
--------------------------------------------------------------------------------