├── packages.txt
├── DR_DC_APP
│   ├── Images
│   │   ├── DR.png
│   │   ├── DR2.png
│   │   ├── comp.png
│   │   ├── DR_old.png
│   │   ├── desur.png
│   │   ├── Composite.png
│   │   ├── Join_Option.jpg
│   │   ├── Compositing_Example.jpg
│   │   └── directional_survey.png
│   ├── 01_🧰_Desurveying_Compositing.py
│   └── pages
│       ├── 03_🧭_Desurveying.py
│       └── 02_⚖️_Compositing.py
├── __pycache__
│   ├── st_utils.cpython-38.pyc
│   ├── st_utils.cpython-311.pyc
│   ├── desurveying.cpython-311.pyc
│   ├── dr_composititng.cpython-38.pyc
│   ├── dr_desurveying.cpython-38.pyc
│   ├── drillhole_utils.cpython-311.pyc
│   ├── compositing_utils.cpython-311.pyc
│   ├── compositing_utils.cpython-38.pyc
│   ├── desurveying_utils.cpython-311.pyc
│   ├── point_compositing.cpython-311.pyc
│   ├── point_compositing.cpython-38.pyc
│   ├── interval_compositing.cpython-311.pyc
│   ├── interval_compositing.cpython-38.pyc
│   ├── dr_composititng_archive.cpython-38.pyc
│   └── dr_desurveying_archive.cpython-311.pyc
├── .gitignore
├── requirements.txt
├── .streamlit
│   └── config.toml
├── README.md
├── st_utils.py
├── drillhole_utils.py
├── Example_Data
│   └── Galore_Creek
│       ├── GCMC_2019_Collar.csv
│       ├── GCMC_2019_Survey.csv
│       └── GCMC_2019_SpecificGravity.csv
├── LICENCE
├── desurveying.py
├── desurveying_utils.py
├── interval_compositing.py
└── point_compositing.py
/packages.txt:
--------------------------------------------------------------------------------
1 | libgl1
2 | libxrender1
--------------------------------------------------------------------------------
/DR_DC_APP/Images/DR.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/DR_DC_APP/Images/DR.png
--------------------------------------------------------------------------------
/DR_DC_APP/Images/DR2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/DR_DC_APP/Images/DR2.png
--------------------------------------------------------------------------------
/DR_DC_APP/Images/comp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/DR_DC_APP/Images/comp.png
--------------------------------------------------------------------------------
/DR_DC_APP/Images/DR_old.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/DR_DC_APP/Images/DR_old.png
--------------------------------------------------------------------------------
/DR_DC_APP/Images/desur.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/DR_DC_APP/Images/desur.png
--------------------------------------------------------------------------------
/DR_DC_APP/Images/Composite.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/DR_DC_APP/Images/Composite.png
--------------------------------------------------------------------------------
/DR_DC_APP/Images/Join_Option.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/DR_DC_APP/Images/Join_Option.jpg
--------------------------------------------------------------------------------
/__pycache__/st_utils.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/st_utils.cpython-38.pyc
--------------------------------------------------------------------------------
/__pycache__/st_utils.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/st_utils.cpython-311.pyc
--------------------------------------------------------------------------------
/DR_DC_APP/Images/Compositing_Example.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/DR_DC_APP/Images/Compositing_Example.jpg
--------------------------------------------------------------------------------
/DR_DC_APP/Images/directional_survey.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/DR_DC_APP/Images/directional_survey.png
--------------------------------------------------------------------------------
/__pycache__/desurveying.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/desurveying.cpython-311.pyc
--------------------------------------------------------------------------------
/__pycache__/dr_composititng.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/dr_composititng.cpython-38.pyc
--------------------------------------------------------------------------------
/__pycache__/dr_desurveying.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/dr_desurveying.cpython-38.pyc
--------------------------------------------------------------------------------
/__pycache__/drillhole_utils.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/drillhole_utils.cpython-311.pyc
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | __pycache__/drillhole_utils.cpython-38.pyc
3 | DR_DC_APP/.DS_Store
4 | Example_Data/.DS_Store
5 | Example_Data/Galore_Creek/.DS_Store
6 |
--------------------------------------------------------------------------------
/__pycache__/compositing_utils.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/compositing_utils.cpython-311.pyc
--------------------------------------------------------------------------------
/__pycache__/compositing_utils.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/compositing_utils.cpython-38.pyc
--------------------------------------------------------------------------------
/__pycache__/desurveying_utils.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/desurveying_utils.cpython-311.pyc
--------------------------------------------------------------------------------
/__pycache__/point_compositing.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/point_compositing.cpython-311.pyc
--------------------------------------------------------------------------------
/__pycache__/point_compositing.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/point_compositing.cpython-38.pyc
--------------------------------------------------------------------------------
/__pycache__/interval_compositing.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/interval_compositing.cpython-311.pyc
--------------------------------------------------------------------------------
/__pycache__/interval_compositing.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/interval_compositing.cpython-38.pyc
--------------------------------------------------------------------------------
/__pycache__/dr_composititng_archive.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/dr_composititng_archive.cpython-38.pyc
--------------------------------------------------------------------------------
/__pycache__/dr_desurveying_archive.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Solve-Geosolutions/DR_DesurvComp_APP/main/__pycache__/dr_desurveying_archive.cpython-311.pyc
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | matplotlib==3.8.2
2 | numpy==1.26.2
3 | pandas==2.1.3
4 | Pillow==10.1.0
5 | plotly==5.18.0
6 | streamlit==1.29.0
7 | vtk==9.1.0
8 | scipy==1.11.4
9 | multiprocess==0.70.15
--------------------------------------------------------------------------------
/.streamlit/config.toml:
--------------------------------------------------------------------------------
1 | [theme]
2 | primaryColor="#94b8b7"
3 | backgroundColor="#FFFFFF"
4 | secondaryBackgroundColor="#E9DFD2"
5 | textColor="#333333"
6 | font="sans serif"
7 |
8 | [deprecation]
9 | # Set to false to disable the deprecation warning for using the global pyplot
10 | # instance.
11 | # Default: true
12 | showPyplotGlobalUse = false
13 | showWarningOnDirectExecution = false
14 |
15 | [client]
16 | showErrorDetails = false
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://www.datarock.com.au)
2 |
3 | # Drillhole Desurveying & Compositing Application
4 | This open-source application provides an interface to desurvey and composite drillhole data. Compositing (or joining) drillhole data is important for analysing the relationships between downhole datasets, which may include logged lithology/stratigraphy, geotechnical information, chemical assays and downhole geophysics. The application provides several options for compositing drillhole data. Desurveying is the process of converting drillhole collar and survey information into downhole X, Y, Z geographical locations. It requires two files to run: firstly, a drillhole collar file containing the Hole ID, Eastings, Northings, Relative Sea Level (RL) and Total Depth (TD); and secondly, a survey file containing the Hole ID, Survey Depth, Azimuth and Dip/Inclination.
5 |
6 | ## [CLICK HERE to access the application.](https://solve-geosolutions-dr-dc-app01--desurveying-compositing-gekwje.streamlit.app)
7 |
8 | 
--------------------------------------------------------------------------------
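
The two input files described in the README follow the layout of the bundled Galore Creek example data. As a minimal sketch of loading and inspecting them with pandas (column names taken from the example CSVs below; this snippet is an illustration, not part of the application itself):

import pandas as pd

# Collar file: Hole ID, Eastings, Northings, RL and Total Depth
collar = pd.read_csv("Example_Data/Galore_Creek/GCMC_2019_Collar.csv")
print(collar[["HOLEID", "EAST", "NORTH", "RL", "DEPTH"]].head())

# Survey file: Hole ID, Survey Depth, Azimuth and Dip
survey = pd.read_csv("Example_Data/Galore_Creek/GCMC_2019_Survey.csv")
print(survey[["HOLEID", "DEPTH", "AZIMUTH", "DIP"]].head())
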
/DR_DC_APP/01_🧰_Desurveying_Compositing.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | from streamlit.logger import get_logger
3 | from PIL import Image
4 |
5 | import sys
6 |
7 | sys.path.append(r"./")
8 |
9 | import pandas as pd
10 | import numpy as np
11 |
12 | from st_utils import *
13 |
14 | LOGGER = get_logger(__name__)
15 |
16 | st.set_page_config(
17 | page_title="Desurveying and Compositing",
18 | page_icon="🧰",
19 | layout="wide",
20 | )
21 |
22 | add_logo()
23 |
24 | st.sidebar.markdown("*This web application proudly brought to you by **[Datarock](https://www.datarock.com.au)***")
25 |
26 | # from drillhole_utils import *
27 |
28 | # image = Image.open("DR_DC_APP/Images/DR.png").resize((600, 200))
29 | # st.image(image)
30 |
31 | st.write(
32 | """# Desurveying and Compositing Application
33 | This application was developed by the Datarock Applied Science team and provides functionality to desurvey and composite drillhole data."""
34 | )
35 |
36 | st.markdown(
37 | """#### Compositing
38 | Compositing drillhole data is the process of standardising downhole data into single interval lengths. The application can handle both categorical and numerical downhole values, provided in either point or interval format.
39 | Several statistical options are provided to calculate information across intervals: maximum overlap, overlap weight and count of intervals for categorical data; and weighted average, maximum, minimum and range of values for numerical data."""
40 | )
41 |
42 | image = Image.open("DR_DC_APP/Images/Composite.png")
43 | st.image(
44 | image,
45 | caption="Figure 1. An example of data composited from 3m intervals (left adjacent) to 10m intervals (right adjacent).",
46 | )
47 |
48 | st.markdown(
49 | """#### Desurveying
50 | Desurveying is the process of calculating the XYZ geographical location of sample points down a drillhole trace from the collar location (Eastings, Northings and RL) and survey data (Dip and Azimuth).
51 | Two desurveying methods are provided within this application: tangential and minimum curvature (recommended). More information on the differences between these methods is provided in the links below."""
52 | )
53 |
54 | image = Image.open("DR_DC_APP/Images/directional_survey.png")
55 | st.image(
56 | image,
57 | caption="Figure 2. An example of survey data being converted to XYZ Geogrpahical space (sourced from AGILE (https://agilescientific.com/blog/2021/4/29/survival-of-the-fittest-or-overcrowding))",
58 | )
59 |
60 | st.markdown(
61 | """Additional links:
62 | - [SEEQUENT - The Dark Art of Drillhole Desurveying](https://www.seequent.com/the-dark-art-of-drillhole-desurveying/)
63 | - [PyGSLIB](https://opengeostat.github.io/pygslib/Tutorial.html#desurveying)
64 | - [AGILE - Additional Open Source Options](https://agilescientific.com/blog/2021/4/29/survival-of-the-fittest-or-overcrowding)
65 |
66 | The code used in this application has been adapted from existing open source repositories [PyGSLIB](https://github.com/opengeostat/pygslib) and [wellpathpy](https://github.com/Zabamund/wellpathpy).
67 | """
68 | )
69 |
70 | st.markdown("### Contact us")
71 | st.markdown("For any questions or comments, please contact info@datarock.com.au")
72 |
--------------------------------------------------------------------------------
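
The page above names two desurvey methods, tangential and minimum curvature. As a hedged illustration of the difference, a single survey-station step can be computed with the standard textbook formulas below (this is not the application's own code, which is adapted from PyGSLIB and wellpathpy; dips are negative-down, as in the example data):

import math

def step_tangential(md, azim, dip):
    # XYZ offset over drilled length `md`, projecting along a single station's direction
    inc, az = math.radians(90 + dip), math.radians(azim)  # dip -90 (vertical) -> inclination 0
    return (md * math.sin(inc) * math.sin(az),   # Easting
            md * math.sin(inc) * math.cos(az),   # Northing
            -md * math.cos(inc))                 # RL (decreases downhole)

def step_min_curvature(md, azim1, dip1, azim2, dip2):
    # XYZ offset between two stations, smoothed along a circular arc
    i1, i2 = math.radians(90 + dip1), math.radians(90 + dip2)
    a1, a2 = math.radians(azim1), math.radians(azim2)
    # Dogleg angle between the two survey directions
    cos_dl = math.cos(i2 - i1) - math.sin(i1) * math.sin(i2) * (1 - math.cos(a2 - a1))
    dl = math.acos(max(-1.0, min(1.0, cos_dl)))
    # Ratio factor; approaches 1 as the two stations become parallel
    rf = 1.0 if dl < 1e-9 else (2 / dl) * math.tan(dl / 2)
    return (md / 2 * (math.sin(i1) * math.sin(a1) + math.sin(i2) * math.sin(a2)) * rf,
            md / 2 * (math.sin(i1) * math.cos(a1) + math.sin(i2) * math.cos(a2)) * rf,
            -md / 2 * (math.cos(i1) + math.cos(i2)) * rf)
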
/st_utils.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | import difflib
3 | from difflib import get_close_matches
4 | import pandas as pd
5 |
6 | ##### CREATE FUNCTION TO CHECK FOR DROP DOWN MATCHES
7 | def get_close_matches_icase(word, possibilities, *args, **kwargs):
8 | """Case-insensitive version of difflib.get_close_matches"""
9 | lword = word.lower()
10 | lpos = {p.lower(): p for p in possibilities}
11 | lmatches = difflib.get_close_matches(lword, lpos.keys(), *args, **kwargs)
12 | return [lpos[m] for m in lmatches]
13 |
14 |
15 | def check_streamlit_holes(dfi, df0):
16 | # Checking Missing Holes
17 | chole_list = dfi["HOLEID"].unique()
18 | shole_list = df0["HOLEID"].unique()
19 |
20 | cgot = [h for h in chole_list if h not in shole_list]
21 | sgot = [h for h in shole_list if h not in chole_list]
22 |
23 | cgot_s = ""
24 | for i in cgot:
25 | cgot_s += "- " + str(i) + "\n"
26 |
27 | sgot_s = ""
28 | for i in sgot:
29 | sgot_s += "- " + str(i) + "\n"
30 |
31 | col1, col2 = st.columns(2)
32 |
33 | if cgot_s == "":
34 | col1.write("All Interval holes have Interval data ✅ ")
35 | else:
36 | col1.write("The following holes are missing Interval data 😭")
37 | col1.markdown(cgot_s)
38 |
39 | if sgot_s == "":
40 | col2.write("All Compositing holes are present ✅ ")
41 | else:
42 | col2.write("The following compositing holes are missing 😭")
43 | col2.markdown(sgot_s)
44 |
45 |
46 | def check_streamlit_desurvey_holes(cdf, sdf):
47 | # Checking Missing Holes
48 | chole_list = cdf["HOLEID"].unique()
49 | shole_list = sdf["HOLEID"].unique()
50 |
51 | cgot = [h for h in chole_list if h not in shole_list]
52 | sgot = [h for h in shole_list if h not in chole_list]
53 |
54 | cgot_s = ""
55 | for i in cgot:
56 | cgot_s += "- " + str(i) + "\n"
57 |
58 | sgot_s = ""
59 | for i in sgot:
60 | sgot_s += "- " + str(i) + "\n"
61 |
62 | col1, col2 = st.columns(2)
63 | col1.write(
64 | """
65 | ##### Collar Holes without Survey Data
66 | """
67 | )
68 | if cgot_s == "":
69 | col1.write("All collar holes have survey data ✅ ")
70 | else:
71 | col1.write("The following holes are missing survey data 😭")
72 | col1.markdown(cgot_s)
73 |
74 | col2.write(
75 | """
76 | ##### Survey Holes without Collar Data
77 | """
78 | )
79 | if sgot_s == "":
80 | col2.write("All survey holes have collar data ✅ ")
81 | else:
82 | col2.write("The following holes are missing collar data 😭")
83 | col2.markdown(sgot_s)
84 |
85 |
86 | def add_logo():
87 | st.markdown(
88 | """
89 |
97 | """,
98 | unsafe_allow_html=True,
99 | )
100 |
--------------------------------------------------------------------------------
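
A small usage sketch for get_close_matches_icase above. The column names are hypothetical and this call is not made anywhere in the repository; it simply shows the case-insensitive matching behaviour:

from st_utils import get_close_matches_icase

# Guess which uploaded column holds the hole identifier, ignoring case
columns = ["HoleID", "East", "North", "rl", "Depth"]
print(get_close_matches_icase("HOLEID", columns, n=1))  # ['HoleID']
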
/drillhole_utils.py:
--------------------------------------------------------------------------------
1 | # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/01_drillhole_utils.ipynb.
2 |
3 | # %% auto 0
4 | __all__ = ['nanaverage', 'myround', 'tunafloor', 'tunaceil', 'round_depth', 'round_from_to', 'generate_fromto', 'rgb2hex']
5 |
6 | # %% ../nbs/01_drillhole_utils.ipynb 2
7 | import pandas as pd
8 | import numpy as np
9 | import math
10 |
11 | # %% ../nbs/01_drillhole_utils.ipynb 3
12 | def nanaverage(
13 | A: np.array, # 1D input array of length M of values to calculate weighted average of
14 | weights: np.array, # 1D input array of length M containing the weights to average by
15 | )->float: # Single value output of weighted average
16 | """This code calculates the weighted average of an array, ignoring any NaN values.
17 | It takes two inputs: A, which is a 1D array of length M containing the values to calculate the weighted
18 | average of, and weights, which is a 1D array of length M containing the weights to average by.
19 | The output is a single value representing the weighted average. The code first converts A into an array of floats,
20 | then calculates the weighted average by summing the product of A and weights and dividing by the sum of
21 | the weights at positions where A is not NaN."""
22 | A = np.transpose(A.values).astype(float)
23 | return np.nansum(A * weights, axis=1) / ((~np.isnan(A)) * weights).sum(axis=1)
24 |
25 | # %% ../nbs/01_drillhole_utils.ipynb 4
26 | def myround(
27 | x: float, # Value to round
28 | base: float, # Value to round by
29 | )->float: # Rounded value
30 | """This code defines a function called myround that takes two arguments, x and base.
31 | It returns a float value that is the result of rounding x to the nearest multiple of base."""
32 | return base * round(x / base)
33 |
34 | # %% ../nbs/01_drillhole_utils.ipynb 5
35 | def tunafloor(
36 | x: float, # Value to get floored value of
37 | base: float, # Value to round to
38 | )->float: # Output floored value
39 | """This code defines a function called 'tunafloor' which takes two arguments, x and base.
40 | The function uses the math.floor() method to round the value of x down to the nearest multiple of base and returns that value."""
41 | return base * math.floor(x / base)
42 |
43 | # %% ../nbs/01_drillhole_utils.ipynb 6
44 | def tunaceil(
45 | x: float, # Value to get ceiling value of
46 | base: float, # Value to round to
47 | )->float: # Output ceiling value
48 | """This code defines a function called 'tunafloor' which takes two arguments, x and base.
49 | The function uses the math.floor() method to round the value of x down to the nearest multiple
50 | of base and returns that value."""
51 | return base * math.ceil(x / base)
52 |
53 | # %% ../nbs/01_drillhole_utils.ipynb 7
54 | def round_depth(
55 | df: pd.DataFrame, # Dataframe containing the data to be rounded
56 | depth: str, # Column name containing the 'from' depths
57 | round_value: int=4 # Value to round data to
58 | )->pd.DataFrame:
59 | """This function that takes in a containing drillhole point data and rounds the values in the specified
60 | column to the specified round value. The function returns the dataframe with the rounded values."""
61 |
62 | # Round the data to set decimal points
63 | df[depth] = np.round(df[depth].astype(float), round_value)
64 |
65 | return df
66 |
67 | # %% ../nbs/01_drillhole_utils.ipynb 10
68 | def round_from_to(
69 | df: pd.DataFrame, # Dataframe containing the data to be rounded
70 | fro: str, # Column name containing the 'from' depths
71 | to: str, # Column name containing the 'to' depths
72 | round_value: int=4
73 | )->pd.DataFrame:
74 | """This code takes in a dataframe, two column names (fro and to) and an optional round_value parameter.
75 | It then rounds the values in the columns specified by fro and to to the number of decimal points specified by round_value (defaults to 4).
76 | Finally, it returns the dataframe with the rounded values."""
77 |
78 | # Round the data to set decimal points
79 | df[fro] = np.round(df[fro].astype(float), round_value)
80 | df[to] = np.round(df[to].astype(float), round_value)
81 |
82 | return df
83 |
84 | # %% ../nbs/01_drillhole_utils.ipynb 13
85 | def generate_fromto(
86 | df: pd.DataFrame, # Dataframe containing the survey and collar data
87 | TD: str, # Name of the column containing the total depth column
88 | interval: float, # The value to interval the data at
89 | )-> pd.DataFrame:
90 | """This function takes in a dataframe containing survey and collar data, the name of the
91 | column containing the total depth column, and an interval value. It then generates bins
92 | from the minimum and maximum depths and interval size, and returns a dataframe containing
93 | the data with columns "FROM" and "TO"."""
94 |
95 | # Generate bins from the minimum and maximum depths and interval size
96 | bins = np.array(range(0, int(math.ceil(max(df[TD])) * 100000), int(interval * 100000))) / 100000
97 |
98 | # Generate dataframe containing the data
99 | subdf = pd.DataFrame(np.array([bins, bins + interval]).T, columns=["FROM", "TO"])
100 |
101 | return subdf
102 |
103 | # %% ../nbs/01_drillhole_utils.ipynb 15
104 | def rgb2hex(
105 | rgb: tuple, # tuple or list of an (R, G, B) value
106 | )->str:
107 | """Converts rgb list/tuple (float, float, float) into hex (e.g. #ffffff)"""
108 | r = rgb[0]
109 | g = rgb[1]
110 | b = rgb[2]
111 | return "#{:02x}{:02x}{:02x}".format(r, g, b)
112 |
--------------------------------------------------------------------------------
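
A brief usage sketch of the helpers above (the values and the one-hole collar table are illustrative, not taken from the repository):

import pandas as pd
from drillhole_utils import myround, tunafloor, tunaceil, generate_fromto

print(myround(7.3, 0.5))    # 7.5 - nearest multiple of 0.5
print(tunafloor(7.3, 0.5))  # 7.0 - rounded down
print(tunaceil(7.3, 0.5))   # 7.5 - rounded up

# Build regular 10 m FROM/TO intervals spanning a hole's total depth
collar = pd.DataFrame({"HOLEID": ["GC19-0909"], "DEPTH": [900.0]})
print(generate_fromto(collar, TD="DEPTH", interval=10).head())  # 0-10, 10-20, ...
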
/Example_Data/Galore_Creek/GCMC_2019_Collar.csv:
--------------------------------------------------------------------------------
1 | HOLEID,HOLETYPE,DH_YEAR,DH_PROGRAM,DH_AREA,GRIDNAME,EAST,NORTH,RL,DEPTH,DH_AZIMUTH,DH_DIP,DH_HOLE_PURPOSE,DH_DRILL_TYPE,DH_CONTRACTOR,DH_RIG_ID,STARTDATE,ENDDATE,DH_BRKN_STK_DEPTH_m,DH_DEPTH_REDUCED_m,DH_TOP_PVC_ELEV_m,DH_ORIGINAL_HOLE,DH_PLAN_HOLEID,DH_PLAN_AZIMUTH,DH_PLAN_DIP,DH_PLAN_DEPTH_m,DH_COL_SURVMETHOD,DH_COL_SURVBY,DH_COL_SURVDATE,DH_SURV_OBS,DH_LOGGED_DATE,DH_LOGGER,DH_HOLE_SUMMARY,DH_OBS,DH_RELOG_OBS,DH_STATUS
2 | GC19-0909,DRILLHOLE,2019,GCMC 2019,CZSGL,UTM_NAD83_09N,350811.974,6334176.262,788.49,900,112,-56,MET,DD,Hy-Tech,Drill 1,22-May-19,6-Jun-19,190,,,,Met-CB-01,110,-55,800,Devisight,Hy-Tech,22-May-19,,13-Jun-19,Keith Roberts,,,,COMPLETE
3 | GC19-0910,DRILLHOLE,2019,GCMC 2019,CZNGL,UTM_NAD83_09N,351355,6335560,721,510,89,-67,MET,DD,Hy-Tech,Drill 2,25-May-19,31-May-19,140.5,,,,Met_CN_05,88,-67,475,Devisight,Hy-Tech,25-May-19,,17-Jun-19,Keith Roberts,,Lauren Foiles logged Lith; Alt; Min; and Struct to 125.45m.,,COMPLETE
4 | GC19-0911,DRILLHOLE,2019,GCMC 2019,JNN,UTM_NAD83_09N,349784.349,6336450.427,1266.39,390,286,-69,Resource,DD,Hy-Tech,Drill 4,27-May-19,31-May-19,234.3,,,,Res_Jcn_007,286,-69,350,Devisight,Hy-Tech,27-May-19,,10-Jun-19,Goksu Erbalaban,,The high intensity of overprinting alteration; in addition to brecciation and/or pseudo-brecciation made identification of volcanic units difficult; thus the undifferentiated intermediate volcanic (V4) code was used throughout. Without being able to decipher individual volcanic units; relationships between individual volcanic units or overturning of beds could not be made.,,COMPLETE
5 | GC19-0912,DRILLHOLE,2019,GCMC 2019,CZRPL,UTM_NAD83_09N,351177.782,6334826.99,694.44,552,85,-65,MET,DD,Hy-Tech,Drill 4,30-May-19,7-Jun-19,116.91,,,,Met-CS-12,85,-65,550,Devisight,Hy-Tech,29-May-19,Azm done by compass as narrow canyon limited GPS accuracy,8-Jun-19,Melissa Zack,,,,COMPLETE
6 | GC19-0913,DRILLHOLE,2019,GCMC 2019,JNN,UTM_NAD83_09N,349786.754,6336447.452,1265.28,330,128,-62,Exploration,DD,Hy-Tech,Drill 3,1-Jun-19,4-Jun-19,222.17,,,,Res_Jcn_08,128,-62,250,Devisight,Hy-Tech,1-Jun-19,,30-Jul-19,Lauren Foiles,,blocks 207 and 210 and 213 are moved forward making the recovery at 100% per run. initial block placement was misplaced.,,COMPLETE
7 | GC19-0914,DRILLHOLE,2019,GCMC 2019,CZRPL,UTM_NAD83_09N,351374.887,6335287.907,712.96,525,88,-73,MET,DD,Hy-Tech,Drill 2,31-May-19,7-Jun-19,102,,,,MET_CN-04,88,-73,525,Devisight,Hy-Tech,1-Jun-19,,21-Jun-19,Raja Yarra,,,,COMPLETE
8 | GC19-0915,DRILLHOLE,2019,GCMC 2019,MDLCK,UTM_NAD83_09N,350246.54,6335191.048,937.42,384,267,-78,MET,DD,Hy-Tech,Drill 3,5-Jun-19,10-Jun-19,150,150,,,MET_MC_06,268,-78,200,Devisight,Hy-Tech,5-Jun-19,,13-Jun-19,Well-Shen Lee,,,,COMPLETE
9 | GC19-0916,DRILLHOLE,2019,GCMC 2019,CZSGL,UTM_NAD83_09N,350773.151,6334056.264,805.66,875,108,-57,MET,DD,Hy-Tech,Drill 1,7-Jun-19,22-Jun-19,306,,,,MET_CB-02,108,-57,875,Devisight,Hy-Tech,7-Jun-19,All surveys complete and data entered; some paper logs missing,7-Jul-19,Dave Zeko,,Numerous block errors below 288m; identified by rod count. Block 117 repeated twice over 6m; logging between 117 and 216 m is offset downhole by 3m; ie logging at 120m is at 117m in reality. Box labels reflect incorrect depths; however; blocks are correct.,,COMPLETE
10 | GC19-0917,DRILLHOLE,2019,GCMC 2019,CZRPL,UTM_NAD83_09N,351177.802,6334826.974,694.38,561,85,-85,Resource,DD,Hy-Tech,Drill 4,7-Jun-19,15-Jun-19,90,,,,Res_CEN_012,85,-85,561,Devisight,Hy-Tech,7-Jun-19,,4-Aug-19,Raja Yarra,,,,COMPLETE
11 | GC19-0918,DRILLHOLE,2019,GCMC 2019,CZRPL,UTM_NAD83_09N,351258.191,6335377.633,729.27,825,0,-90,Resource,DD,Hy-Tech,Drill 3,11-Jun-19,21-Jun-19,90,,,,RES-CEN_004,117,-90,700,Devisight,Hy-Tech,8-Jun-19,,12-Jul-19,Well-Shen Lee,,,,COMPLETE
12 | GC19-0919,DRILLHOLE,2019,GCMC 2019,WF,UTM_NAD83_09N,350774.208,6332990.293,793.72,450,267,-75,MET,DD,Hy-Tech,Drill 3,12-Jun-19,16-Jun-19,24,,,,MET_WF-10,268,-75,450,Devisight,Hy-Tech,23-May-19,,2-Jul-19,Well-Shen Lee,,,,COMPLETE
13 | GC19-0920,DRILLHOLE,2019,GCMC 2019,JNN,UTM_NAD83_09N,349657.066,6336156.672,1203.32,270,88,-65,MET,DD,Hy-Tech,Drill 4,18-Jun-19,21-Jun-19,140.75,,,,MET-JN-07,88,-65,275,Devisight,Hy-Tech,16-May-19,David Zeko sighted drillhole,3-Jul-19,Lauren Foiles,,There were significant drilling issues in the first 140m of this hole. This resulted in very poor recovery and a large deviation in the drill hole that is reflected in measured recovery and the down hole surveys. The azimuth varies from -70 to -83 degrees.,,COMPLETE
14 | GC19-0921,DRILLHOLE,2019,GCMC 2019,WF,UTM_NAD83_09N,350810.471,6332915.098,794.42,243,107,-55,Resource,DD,Hy-Tech,Drill 3,18-Jun-19,19-Jun-19,42,,,,RES-SW-004,108,-55,250,GPS (Hand held),Hy-Tech,29-May-19,Pre anchor dip angle; trouble with devisight phone interface which is why the gps was used.,28-Jun-19,Lauren Foiles,,,,COMPLETE
15 | GC19-0922,DRILLHOLE,2019,GCMC 2019,SW,UTM_NAD83_09N,349994.429,6333576.275,940.56,438,358,-63,MET,DD,Hy-Tech,Drill 3,22-Jun-19,26-Jun-19,170,,,,MET_SW-08,358,-63,400,Devisight,Hy-Tech,22-Jun-19,,7-Jul-19,Well-Shen Lee,,casing down hole to 43.5 m,,COMPLETE
16 | GC19-0923,DRILLHOLE,2019,GCMC 2019,CZRPL,UTM_NAD83_09N,351373.001,6335112.291,716.54,690,88,-90,Resource,DD,Hy-Tech,Drill 2,22-Jun-19,2-Jul-19,74,,,,RES-CEN-003,88,-90,670,Devisight,Hy-Tech,24-Jun-19,,1-Aug-19,Keith Roberts,,Upper part of hole above 250m logged by Zuzka Gazdik,,COMPLETE
17 | GC19-0924,DRILLHOLE,2019,GCMC 2019,JNS,UTM_NAD83_09N,349216.91,6335376.497,1089.17,510,119,-55,Resource,DD,Hy-Tech,Drill 4,24-Jun-19,2-Jul-19,216,,,,Res_Jcn_009,118,-55,375,Devisight,Hy-Tech,24-Jun-19,,14-Jul-19,Raja Yarra,,Lauren Foiles logged to 258.2m. Raja Yarra logged from 258.2 to 510 (EOH),,COMPLETE
18 | GC19-0925,DRILLHOLE,2019,GCMC 2019,SW,UTM_NAD83_09N,350112.23,6333653.843,915.29,327,358,-63,MET,DD,Hy-Tech,Drill 3,28-Jun-19,1-Jul-19,111,,,,Met_SW-09,358,-63,230,Devisight,Hy-Tech,27-Jun-19,,18-Jul-19,Dave Zeko,,See full hole summary for details on lithology; alteration and structure.,,COMPLETE
19 | GC19-0926,DRILLHOLE,2019,GCMC 2019,SW,UTM_NAD83_09N,350051.585,6333390.901,937.83,657,344,-85,Resource,DD,Hy-Tech,Drill 3,1-Jul-19,9-Jul-19,132,,,,Res_SW_011,343,-85,600,Devisight,Hy-Tech,1-Jul-19,,12-Aug-19,Well-Shen Lee,,,,COMPLETE
20 | GC19-0927,DRILLHOLE,2019,GCMC 2019,JNN,UTM_NAD83_09N,349659.262,6336165.891,1205.37,345,88,-65,MET,DD,Hy-Tech,Drill 4,2-Jul-19,4-Jul-19,123,,,,Met_JN_07,100,-77,300,Devisight,Hy-Tech,2-Jul-19,,18-Jul-19,Well-Shen Lee,,Between boxes 69 70 71 (130.5 to 132m has 2.91 m of recovered stick rock); 1.59m of excess drill core. PLT is down. No data recovered for entire drill hole.,,COMPLETE
21 | GC19-0928,DRILLHOLE,2019,GCMC 2019,JNN,UTM_NAD83_09N,349994.819,6336725.533,1297.99,461,98,-65,Exploration,DD,Hy-Tech,Drill 2,3-Jul-19,10-Jul-19,110.1,,,,Res_Jcn_010,8,-65,350,Devisight,Hy-Tech,3-Jul-19,,13-Aug-19,Raja Yarra,,,,COMPLETE
22 | GC19-0929A,DRILLHOLE,2019,GCMC 2019,JNS,UTM_NAD83_09N,349491.421,6335683.882,1069.52,312,128,-75,Resource,DD,Hy-Tech,Drill 4,7-Jul-19,12-Jul-19,93,,,,Res_Jcn_012,128,-75,275,Devisight,Hy-Tech,7-Jul-19,,25-Jul-19,Dave Zeko,,,,COMPLETE
23 | GC19-0930,DRILLHOLE,2019,GCMC 2019,SW,UTM_NAD83_09N,349994.46,6333576.268,940.48,180,358,-63,MET,DD,Hy-Tech,Drill 2,10-Jul-19,12-Jul-19,163,,,,MET-SW-08 redrill,358,-68,180,Devisight,Hy-Tech,10-Jul-19,,18-Jul-19,Raja Yarra,,,,COMPLETE
24 | GC19-0931,DRILLHOLE,2019,GCMC 2019,CZRPL,UTM_NAD83_09N,350911.612,6334702.49,776.83,494.39,198,-82,Resource,DD,Hy-Tech,Drill 2,13-Jul-19,20-Jul-19,135.2,,,,RES-CEN-010,198,-82,500,Devisight,Hy-Tech,21-Jul-19,,14-Aug-19,Claire Leighton,,,,COMPLETE
25 | GC19-0932,DRILLHOLE,2019,GCMC 2019,SW,UTM_NAD83_09N,350218.248,6333356.202,919.46,528,354,-67,Resource,DD,Hy-Tech,Drill 4,12-Jul-19,22-Jul-19,164,,,,Res_SW_008,353,-67,520,Devisight,Hy-Tech,20-Jul-19,,2-Sep-19,Claire Leighton,,,,COMPLETE
26 | GC19-0933,DRILLHOLE,2019,GCMC 2019,JNS,UTM_NAD83_09N,349493.24,6335685.7,1067.08,489,128,-49,Resource,DD,Hy-Tech,Drill 4,13-Jul-19,20-Jul-19,114.5,,,,RES-JCN-011,128,-50,400,Devisight,Hy-Tech,15-Jul-19,,3-Sep-19,Keith Roberts,,,,COMPLETE
27 | GC19-0934,DRILLHOLE,2019,GCMC 2019,WF,UTM_NAD83_09N,350435.253,6332537.689,898.86,396,41,-60,Exploration,DD,Hy-Tech,Drill 4,21-Jul-19,24-Jul-19,63.5,,,,RES-WF-001,43,-60,300,Devisight,Hy-Tech,21-Jul-19,,22-Aug-19,Dave Zeko,,,,COMPLETE
28 | GC19-0935,DRILLHOLE,2019,GCMC 2019,SW,UTM_NAD83_09N,350220.311,6333360.672,918.8,519,354,-90,Resource,DD,Hy-Tech,Drill 3,22-Jul-19,30-Jul-19,136.74,,,,RES-SW-012,353,-90,450,Devisight,Hy-Tech,24-Jul-19,,16-Sep-19,Jessica Prince,,,,COMPLETE
29 | GC19-0936,DRILLHOLE,2019,GCMC 2019,WF,UTM_NAD83_09N,350615.973,6333415.258,854.18,333,157,-60,Exploration,DD,Hy-Tech,Drill 4,25-Jul-19,5-Aug-19,125,,,,RES_WF_002,158,-60,300,Devisight,Hy-Tech,25-Jul-19,,4-Aug-19,Well-Shen Lee,,,,COMPLETE
30 | GC19-0937,DRILLHOLE,2019,GCMC 2019,JNGAP,UTM_NAD83_09N,349182.9,6335061.843,1097.8,360,111,-55,Exploration,DD,Hy-Tech,Drill 4,29-Jul-19,4-Aug-19,210,,,,Res_Jcn_013,111,-55,350,Devisight,Hy-Tech,29-Jul-19,,17-Aug-19,Well-Shen Lee,,,,COMPLETE
31 | GC19-0938,DRILLHOLE,2019,GCMC 2019,SW,UTM_NAD83_09N,350214.963,6333285.425,929.14,282,161,-75,Resource,DD,Hy-Tech,Drill 3,30-Jul-19,7-Aug-19,103.5,,,,Res_SW_007,163,-75,240,Devisight,Hy-Tech,30-Jul-19,,9-Sep-19,Jessica Prince,,Logged by Keith Roberts up to 74m,,COMPLETE
32 | GC19-0939,DRILLHOLE,2019,GCMC 2019,BT,UTM_NAD83_09N,349340.586,6334677.599,1071.2,480,268,-60,Exploration,DD,Hy-Tech,Drill 4,6-Aug-19,12-Aug-19,135,,,,Res_Jcn_014,268,-60,350,Devisight,Hy-Tech,6-Aug-19,,25-Aug-19,Raja Yarra,,,,COMPLETE
33 | GC19-0940,DRILLHOLE,2019,GCMC 2019,WF,UTM_NAD83_09N,350452,6332852,887.2,429,210,-90,Resource,DD,Hy-Tech,Drill 3,8-Aug-19,13-Aug-19,63.5,,,,Res_SW_002,208,-90,365,Devisight,Hy-Tech,12-Aug-19,,25-Aug-19,Claire Leighton,,alteration destroyed textures in the shallow breccia and matrix-cement-clast characteristics could not be reliably determined.,,COMPLETE
34 | GC19-0941,DRILLHOLE,2019,GCMC 2019,WF,UTM_NAD83_09N,350451.755,6332853.048,884.35,300,208,-60,Resource,DD,Hy-Tech,Drill 3,14-Aug-19,16-Aug-19,50,,,,RES-SW-001,208,-67,300,Devisight,Hy-Tech,15-Aug-19,,5-Sep-19,Claire Leighton,,,,COMPLETE
35 | GC19-0942,DRILLHOLE,2019,GCMC 2019,SDL,UTM_NAD83_09N,352781.382,6332483.427,1580.08,315.9,180,-50,Resource,DD,Hy-Tech,Drill 4,14-Aug-19,19-Aug-19,315.9,,,,RES-SDL-003,180,-50,300,Devisight,Hy-Tech,14-Aug-19,Survey date not on survey 123.,12-Sep-19,Dave Zeko,,Consistent stick rock not intercepted.,,COMPLETE
36 | GC19-0943,DRILLHOLE,2019,GCMC 2019,BNTFL,UTM_NAD83_09N,351720.194,6334768.087,706.59,681,257,-78,Resource,DD,Hy-Tech,Drill 3,19-Aug-19,28-Aug-19,159,187.5,,,Res_Cen_015,257,-78,650,Devisight,Hy-Tech,21-Aug-19,,25-Sep-19,Dave Zeko,,Block error resulted in change of final depth from 684 to 681m.,,COMPLETE
37 | GC19-0944,DRILLHOLE,2019,GCMC 2019,SDL,UTM_NAD83_09N,352781.37,6332483.46,1580.03,447,180,-75,Exploration,DD,Hy-Tech,Drill 4,19-Aug-19,25-Aug-19,340.86,,,,Res_Sdl_004,180,-75,400,Devisight,Hy-Tech,19-Aug-19,,25-Sep-19,Keith Roberts,,,,COMPLETE
38 | GC19-0945,DRILLHOLE,2019,GCMC 2019,JNS,UTM_NAD83_09N,349523,6335325,1038,417,129,-57,Exploration,DD,Hy-Tech,Drill 4,2-Sep-19,7-Sep-19,217.5,,,,Res-JCN-016,130,-57,350,Devisight,Hy-Tech,2-Sep-19,,8-Sep-19,Raja Yarra,,no quicklog recorded because it was logged direct from rig.,,COMPLETE
39 | GC19-0946,DRILLHOLE,2019,GCMC 2019,CZRPL,UTM_NAD83_09N,351600.108,6335289.407,701.86,543,112,-74,Resource,DD,Hy-Tech,Drill 3,28-Aug-19,4-Sep-19,171.5,,,,Res_Cen_013,113,-74,600,Devisight,Hy-Tech,29-Aug-19,,25-Sep-19,Erich Schmitt,,Target: Resource infill and waste conversion (in pit); exploration and resource expansion (below pit).,,COMPLETE
40 | GC19-0947,DRILLHOLE,2019,GCMC 2019,BNTFL,UTM_NAD83_09N,350844,6333757,787,768,78,-77,Resource,DD,Hy-Tech,Drill 3,5-Sep-19,13-Sep-19,55,,,,RES-BN-03,78,-77,750,Devisight,Hy-Tech,4-Sep-19,,26-Sep-19,Claire Leighton,,,,COMPLETE
41 | GC19-0948,DRILLHOLE,2019,GCMC 2019,BT,UTM_NAD83_09N,349046.485,6334164.781,1184.09,630,250,-80,Resource,DD,Hy-Tech,Drill 4,8-Sep-19,19-Sep-19,141,,,,RES_BTT_001,248,-80,475,Devisight,Hy-Tech,8-Sep-19,,24-Sep-19,Raja Yarra,,,,COMPLETE
--------------------------------------------------------------------------------
/LICENCE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2022 Datarock Pty Ltd
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/desurveying.py:
--------------------------------------------------------------------------------
1 | # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/Desurveying/02_desurveying.ipynb.
2 |
3 | # %% auto 0
4 | __all__ = ['desurvey_dataframe_set', 'desurvey_dataframe_custom', 'plot_drill_trace', 'export_vtp']
5 |
6 | # %% ../nbs/Desurveying/02_desurveying.ipynb 4
7 | import warnings
8 | warnings.filterwarnings("ignore")
9 |
10 | import pandas as pd
11 | import numpy as np
12 | import math
13 | import itertools
14 | import plotly.graph_objects as go
15 | from plotly.subplots import make_subplots
16 | import plotly.express as px
17 | from scipy import interpolate
18 | from scipy.interpolate import interpn
19 | import matplotlib
20 | import matplotlib.cm as cm
21 | from vtkmodules import util
22 | import vtkmodules.all as vtk
23 | from vtkmodules.util import numpy_support
24 |
25 | from drillhole_utils import *
26 | from desurveying_utils import *
27 |
28 | # %% ../nbs/Desurveying/02_desurveying.ipynb 6
29 | def desurvey_dataframe_set(cdf: pd.DataFrame, # Pandas dataframe containing the collar information
30 | sdf: pd.DataFrame, # Pandas dataframe containing the survey information
31 | cHOLEID: str, # Collar HoleID column name
32 | EAST: str, # Collar Easting/Longitude column name
33 | NORTH: str, # Collar Northing/Latitude column name
34 | RL: str, # Collar Relative Sea Level/Z column name
35 | TD: str, # Collar total depth column name
36 | sHOLEID: str, # Survey HoleID column name
37 | AZIM: str, # Survey Azimuth column name
38 | DIP: str, # Survey Dip column name
39 | DEPTH: str, #Survey Depth column name
40 | interval: float, # Interval to desurvey to
41 | desurvey_type: str='min_curve', # Desurvey method selection (min_curve or tangent)
42 | add_90: bool=True, # Add survey info at 90 degrees for holes missing survey data
43 | )->pd.DataFrame:
44 | """This function takes two Pandas dataframes containing collar and survey information,
45 | and combines them into one dataframe. It then desurveys the dataframe to a given interval
46 | using either min_curve or tangent desurvey methods. The function returns a desurveyed Pandas dataframe."""
47 |
48 | # Combines dataframe
49 | adf = combine_collar_survey(cdf, sdf, cHOLEID=cHOLEID, EAST=EAST, NORTH=NORTH, RL=RL, TD=TD, sHOLEID=sHOLEID, AZIM=AZIM, DIP=DIP, DEPTH=DEPTH, add_90=add_90)
50 |
51 | #Desurveys dataframe
52 | desdf = adf.groupby("HOLEID").apply(desurvey_hole_to_interval, interval=interval, desurvey_type=desurvey_type).reset_index(drop=False).drop("level_1", axis=1)
53 |
54 | return desdf
55 |
56 | # %% ../nbs/Desurveying/02_desurveying.ipynb 10
57 | def desurvey_dataframe_custom(cdf: pd.DataFrame, # Pandas dataframe containing the collar information
58 | sdf: pd.DataFrame, # Pandas dataframe containing the survey information
59 | dfi: pd.DataFrame, # Pandas dataframe containing the intervals to desurvey to
60 | cHOLEID: str, # Collar HoleID column name
61 | EAST: str, # Collar Easting/Longitude column name
62 | NORTH: str, # Collar Northing/Latitude column name
63 | RL: str, # Collar Relative Sea Level/Z column name
64 | TD: str, # Collar total depth column name
65 | sHOLEID: str, # Survey HoleID column name
66 | AZIM: str, # Survey Azimuth column name
67 | DIP: str, # Survey Dip column name
68 | iHOLEID: str, # Interval data HoleID column
69 | iTO: str, # To column for interval data
70 | iFROM: str, # From column for interval data
71 | DEPTH: str, #Survey Depth column name
72 | desurvey_type: str='min_curve', # Desurvey method selection (min_curve or tangent)
73 | merge_data: bool=True, # Whether to merge the input data back onto the desurveyed data
74 | add_90: bool=True, # Add survey info at 90 degrees for holes missing survey data
75 | )->pd.DataFrame:
76 | """This function desurveys a dataframe using custom intervals. It takes in two pandas dataframes containing
77 | collar and survey information, as well as a third pandas dataframe containing the intervals to desurvey to.
78 | It also takes in strings for the column names of each dataframe. The function then renames the columns of
79 | the interval data, combines the collar and survey dataframes, and desurveys it using the custom intervals.
80 | It returns a pandas dataframe with the desurveyed information."""
81 |
82 | #Rename column names in interval data
83 | dfi = dfi.rename(columns={iHOLEID:'HOLEID', iFROM:'FROM', iTO:'TO'})
84 |
85 | # Combines dataframe
86 | adf = combine_collar_survey(cdf, sdf, cHOLEID=cHOLEID, EAST=EAST, NORTH=NORTH, RL=RL, TD=TD, sHOLEID=sHOLEID, AZIM=AZIM, DIP=DIP, DEPTH=DEPTH, add_90=add_90)
87 |
88 | #Desurveys dataframe
89 | desdf = desurvey_df_from_df_custom(adf, dfi, desurvey_type=desurvey_type, merge_data=merge_data)
90 |
91 | return desdf
92 |
93 | # %% ../nbs/Desurveying/02_desurveying.ipynb 14
94 | def plot_drill_trace(
95 | dfc: pd.DataFrame, # Desurveyed dataframe
96 | col2plot: str, # Column to plot
97 | annotate=False, # Add collar labels to drillholes (default False)
98 | )->go.Figure:
99 | """This function takes in a dataframe (dfc), a column to plot (col2plot), and an optional parameter
100 | to annotate the drillholes (annotate). It then sets up colour dictionaries, and loops through individual
101 | holes to add traces. Depending on the data type of col2plot, it will plot the hole using either a
102 | numerical colour scale or qualitative colour scale. If annotate is set to True, it will add collar labels
103 | for each drillhole. Finally, it updates the figure with various parameters such as width, height,
104 | legend title text, axis titles, aspect ratio and margins. It then returns the figure."""
105 |
106 | # Remove true easting and northings due to a precision issue
107 | dfc["EAST_md"] = dfc["EAST_md"] - min(dfc["EAST_md"])
108 | dfc["NORTH_md"] = dfc["NORTH_md"] - min(dfc["NORTH_md"])
109 | # dfc['RL'] = dfc['RL']-min(dfc['RL'])
110 |
111 | # Set figure to append plots to
112 | fig = go.Figure()
113 |
114 | # Setup colour dictionaries
115 | if np.issubdtype(dfc[col2plot].dtype, np.number):
116 | minima = np.min(dfc[col2plot])
117 | maxima = np.max(dfc[col2plot])
118 | norm = matplotlib.colors.Normalize(vmin=minima, vmax=maxima, clip=True)
119 | mapper = cm.ScalarMappable(norm=norm, cmap=cm.gist_rainbow_r)
120 |
121 | plotly_cat_dict = {}
122 | for v in dfc[col2plot].unique():
123 | plotly_cat_dict[v] = rgb2hex([int(x * 255) for x in list(mapper.to_rgba(v))])
124 |
125 | # Loop through individual holes and add trace
126 | for idx, holeid in enumerate(dfc["HOLEID"].unique()):
127 |
128 | # Get unique hole
129 | dfc_unique = dfc[dfc["HOLEID"] == holeid]
130 |
131 | # Plot hole depending on what the data type is
132 | if np.issubdtype(dfc_unique[col2plot].dtype, np.number):
133 | fig.add_trace(
134 | go.Scatter3d(
135 | x=dfc_unique["EAST_md"],
136 | y=dfc_unique["NORTH_md"],
137 | z=dfc_unique["RL_md"],
138 | line_color=dfc_unique[col2plot].map(plotly_cat_dict),
139 | mode="lines",
140 | name=holeid,
141 | hoverinfo=["text"],
142 | legendgroup=col2plot,
143 | text=dfc_unique[col2plot],
144 | line=dict(width=30),
145 | )
146 | )
147 | else:
148 | col_dict = dict(zip(dfc[col2plot].unique(), px.colors.qualitative.Light24 * 10))
149 | fig.add_trace(
150 | go.Scatter3d(
151 | x=dfc_unique["EAST_md"],
152 | y=dfc_unique["NORTH_md"],
153 | z=dfc_unique["RL_md"],
154 | line_color=dfc_unique[col2plot].map(col_dict),
155 | mode="lines",
156 | name=holeid,
157 | hoverinfo=["text"],
158 | legendgroup=col2plot,
159 | text=dfc_unique[col2plot],
160 | line=dict(width=30),
161 | )
162 | )
163 |
164 | if annotate:
165 | # Set up annotations
166 | df0 = dfc[dfc["FROM"] == 0].reset_index(drop=True)
167 | ann = [dict(x=x, y=y, z=z, text=holeid, showarrow=False) for x, y, z, holeid in zip(df0["EAST_md"], df0["NORTH_md"], df0["RL_md"] + 10, df0["HOLEID"])]
168 | fig.update_layout(scene=dict(annotations=ann))
169 |
170 | # Update figure
171 | fig.update_layout(width=1000, height=1000, showlegend=False, legend_title_text="Cluster_ID")
172 | fig.update_layout(scene=dict(xaxis_title="X", yaxis_title="Y", zaxis_title="RL"))
173 | fig.update_layout(scene_aspectmode="data", scene_aspectratio=dict(x=1, y=1, z=1))
174 | fig.update_layout(margin=dict(l=0, r=0, b=0, t=0))
175 |
176 | return fig
177 |
178 | # %% ../nbs/Desurveying/02_desurveying.ipynb 17
179 | def export_vtp(df: pd.DataFrame, # Pandas dataframe containing desurveyed data
180 | path: str, # Output directory and filename
181 | )->None:
182 | """This function exports desurveyed drillhole table (dataframe) to vtk lines (vtp file). This file is then ready for use in Paraview and can be dragged and dropped into the software"""
183 |
184 | #create array views
185 | xb = df['EAST_fr'].values
186 | yb = df['NORTH_fr'].values
187 | zb = df['RL_fr'].values
188 | xe = df['EAST_to'].values
189 | ye = df['NORTH_to'].values
190 | ze = df['RL_to'].values
191 |
192 | ##############################
193 | #first we store the data in a set of vtk arrays (that will be cell data)
194 | dlen = xb.shape[0]
195 |
196 | vtkfields={}
197 | for i in df.columns:
198 | # assign the right vtk type
199 | dtype = df[i].dtype
200 |
201 | # copy data
202 | if np.issubdtype(dtype, np.number):
203 | vtkfields[i]= numpy_support.numpy_to_vtk(df[i].values)
204 | vtkfields[i].SetName(i)
205 | vtkfields[i].SetNumberOfComponents(1)
206 |
207 | else:
208 | # this is for string arrays. Not optimized...
209 | vtkfields[i]= vtk.vtkStringArray()
210 | vtkfields[i].SetName(i)
211 | vtkfields[i].SetNumberOfComponents(1)
212 | vtkfields[i].SetNumberOfTuples(dlen)
213 |
214 | for l in range(dlen):
215 | vtkfields[i].SetValue(l, str(df[i].iloc[l]))
216 | ##############################
217 |
218 | # now we create a set of vtk points
219 | points= vtk.vtkPoints()
220 | npoints = dlen*2
221 | points.SetNumberOfPoints(npoints)
222 |
223 | # now we create a set of lines representing the cores and
224 | # a line container (a cell array)
225 | line = vtk.vtkLine()
226 | lines = vtk.vtkCellArray()
227 |
228 | # populate this data
229 | n=-1
230 | for l in range(dlen):
231 |
232 | n=n+1
233 | points.SetPoint(n, xb[l], yb[l], zb[l])
234 | line.GetPointIds().SetId(0, n)
235 | n=n+1
236 | points.SetPoint(n, xe[l], ye[l], ze[l])
237 | line.GetPointIds().SetId(1, n)
238 | lines.InsertNextCell(line)
239 |
240 |
241 | # Create a polydata to store everything in
242 | linesPolyData = vtk.vtkPolyData()
243 |
244 | # Add the points to the dataset
245 | linesPolyData.SetPoints(points)
246 |
247 | # Add the lines to the dataset
248 | linesPolyData.SetLines(lines)
249 |
250 | #add properties
251 | for i in vtkfields:
252 | linesPolyData.GetCellData().AddArray(vtkfields[i])
253 |
254 | # save data to VTK file
255 | assert linesPolyData.GetClassName()=='vtkPolyData', 'error input vtk object is of type {}, a vtkPolyData was expected'.format(linesPolyData.GetClassName())
256 |
257 | # add extension to path
258 | if not path.lower().endswith('.vtp'):
259 | path = path + '.vtp'
260 |
261 | writer = vtk.vtkXMLPolyDataWriter()
262 | writer.SetFileName(path)
263 | writer.SetInputData(linesPolyData)
264 | writer.Write()
265 |
266 |
267 |
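# A minimal usage sketch of export_vtp (assumptions: `desurveyed` already carries
# the EAST_fr/NORTH_fr/RL_fr and EAST_to/NORTH_to/RL_to columns this exporter
# reads, and the file path is hypothetical):
#
#     desurveyed = pd.read_csv("desurveyed_data.csv")
#     export_vtp(desurveyed, "exports/drillholes")  # ".vtp" is appended if missing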
--------------------------------------------------------------------------------
/Example_Data/Galore_Creek/GCMC_2019_Survey.csv:
--------------------------------------------------------------------------------
1 | HOLEID,DEPTH,SURVTYPE,AZIMUTH,DIP
2 | GC19-0909,30,REFLEX,107.5,-54.9
3 | GC19-0909,81,REFLEX,105.1,-55.4
4 | GC19-0909,132,REFLEX,106.1,-56
5 | GC19-0909,183,REFLEX,104.3,-56.5
6 | GC19-0909,234,REFLEX,104.9,-56.4
7 | GC19-0909,285,REFLEX,105.5,-56.3
8 | GC19-0909,336,REFLEX,106.7,-55.9
9 | GC19-0909,387,REFLEX,109.4,-55.5
10 | GC19-0909,438,REFLEX,109.4,-54.8
11 | GC19-0909,489,REFLEX,111,-54.6
12 | GC19-0909,540,REFLEX,111.7,-54
13 | GC19-0909,591,REFLEX,115.4,-53.5
14 | GC19-0909,642,REFLEX,115.5,-52.9
15 | GC19-0909,693,REFLEX,116,-52.5
16 | GC19-0909,744,REFLEX,118.5,-51.8
17 | GC19-0909,795,REFLEX,123.5,-50.6
18 | GC19-0909,846,REFLEX,120.25,-49.4
19 | GC19-0909,897,REFLEX,117,-48
20 | GC19-0910,51,REFLEX,91.2,-67.1
21 | GC19-0910,102,REFLEX,91.45,-67.8
22 | GC19-0910,129,REFLEX,91.7,-68.5
23 | GC19-0910,180,REFLEX,88.9,-68.7
24 | GC19-0910,231,REFLEX,89,-69
25 | GC19-0910,282,REFLEX,87,-69.5
26 | GC19-0910,333,REFLEX,89.9,-69.9
27 | GC19-0910,384,REFLEX,91.8,-69.9
28 | GC19-0910,435,REFLEX,95.6,-70.4
29 | GC19-0910,486,REFLEX,98,-70.7
30 | GC19-0911,15,REFLEX,295.1,-68.6
31 | GC19-0911,51,REFLEX,287,-68.9
32 | GC19-0911,102,REFLEX,286.3,-69
33 | GC19-0911,153,REFLEX,286.7,-69.2
34 | GC19-0911,204,REFLEX,291.1,-68.7
35 | GC19-0911,255,REFLEX,290.7,-68.3
36 | GC19-0911,306,REFLEX,293.2,-68.9
37 | GC19-0911,357,REFLEX,297.8,-69
38 | GC19-0912,51,REFLEX,84.8,-65.9
39 | GC19-0912,102,REFLEX,84.6,-66.8
40 | GC19-0912,153,REFLEX,83.9,-67.1
41 | GC19-0912,204,REFLEX,86.3,-66.3
42 | GC19-0912,255,REFLEX,88.5,-65.7
43 | GC19-0912,306,REFLEX,89.8,-65.1
44 | GC19-0912,357,REFLEX,92.5,-64.2
45 | GC19-0912,408,REFLEX,94.7,-63.6
46 | GC19-0912,459,REFLEX,96.2,-62.8
47 | GC19-0912,510,REFLEX,96.8,-61.8
48 | GC19-0912,552,REFLEX,98.5,-61
49 | GC19-0913,24,REFLEX,129.7,-62.7
50 | GC19-0913,75,REFLEX,129.2,-63
51 | GC19-0913,126,REFLEX,129.85,-62.7
52 | GC19-0913,177,REFLEX,130.5,-62.2
53 | GC19-0913,228,REFLEX,131.3,-61.9
54 | GC19-0913,279,REFLEX,131.8,-61.6
55 | GC19-0913,330,REFLEX,132.3,-61.5
56 | GC19-0914,39,REFLEX,88.3,-73.2
57 | GC19-0914,90,REFLEX,88.7,-73.8
58 | GC19-0914,141,REFLEX,88.7,-74.5
59 | GC19-0914,192,REFLEX,87.3,-74.7
60 | GC19-0914,243,REFLEX,89.6,-74.9
61 | GC19-0914,294,REFLEX,90.9,-75.1
62 | GC19-0914,345,REFLEX,93.7,-75.1
63 | GC19-0914,396,REFLEX,95.5,-75.1
64 | GC19-0914,447,REFLEX,97.3,-75.1
65 | GC19-0914,498,REFLEX,97.2,-75.1
66 | GC19-0914,525,REFLEX,98.1,-75.3
67 | GC19-0915,51,REFLEX,266.7,-78.4
68 | GC19-0915,102,REFLEX,263.95,-78.6
69 | GC19-0915,153,REFLEX,261.2,-78.8
70 | GC19-0915,204,REFLEX,261.5,-78.2
71 | GC19-0915,255,REFLEX,267.2,-78.3
72 | GC19-0915,306,REFLEX,271.9,-78.3
73 | GC19-0915,357,REFLEX,273.25,-78.9
74 | GC19-0915,384,REFLEX,274.6,-79
75 | GC19-0916,45,REFLEX,109.3,-57.3
76 | GC19-0916,96,REFLEX,107.3,-58.1
77 | GC19-0916,147,REFLEX,107.2,-58.7
78 | GC19-0916,198,REFLEX,107.3,-58.7
79 | GC19-0916,249,REFLEX,109,-58.9
80 | GC19-0916,300,REFLEX,109.8,-59.2
81 | GC19-0916,351,REFLEX,110,-59.1
82 | GC19-0916,402,REFLEX,110.8,-59.4
83 | GC19-0916,453,REFLEX,113,-59.5
84 | GC19-0916,504,REFLEX,116,-59.2
85 | GC19-0916,555,REFLEX,114.8,-58.9
86 | GC19-0916,606,REFLEX,116.7,-58.5
87 | GC19-0916,657,REFLEX,120.5,-58
88 | GC19-0916,708,REFLEX,122.9,-57.3
89 | GC19-0916,759,REFLEX,124.9,-56.5
90 | GC19-0916,861,REFLEX,126.9,-54.2
91 | GC19-0917,45,REFLEX,52.5,-89.1
92 | GC19-0917,96,REFLEX,108.5,-89.3
93 | GC19-0917,147,REFLEX,145.5,-89.8
94 | GC19-0917,198,REFLEX,180.5,-89.8
95 | GC19-0917,249,REFLEX,171.9,-89.6
96 | GC19-0917,300,REFLEX,131.9,-89.4
97 | GC19-0917,351,REFLEX,175.5,-89.3
98 | GC19-0917,402,REFLEX,164.6,-89.4
99 | GC19-0917,453,REFLEX,171.1,-89.4
100 | GC19-0917,504,REFLEX,232.7,-89.7
101 | GC19-0917,555,REFLEX,233.1,-89.6
102 | GC19-0918,81,REFLEX,295.4,-88.4
103 | GC19-0918,132,REFLEX,296.7,-88.4
104 | GC19-0918,183,REFLEX,294.7,-88.1
105 | GC19-0918,234,REFLEX,299.1,-88
106 | GC19-0918,285,REFLEX,297.7,-88.1
107 | GC19-0918,336,REFLEX,301.2,-88
108 | GC19-0918,381,REFLEX,307.2,-87.8
109 | GC19-0918,432,REFLEX,306.5,-87.9
110 | GC19-0918,483,REFLEX,310.6,-87.8
111 | GC19-0918,534,REFLEX,306.7,-87.5
112 | GC19-0918,585,REFLEX,313.5,-87.5
113 | GC19-0918,636,REFLEX,309.3,-87.3
114 | GC19-0918,687,REFLEX,310.4,-87.1
115 | GC19-0918,738,REFLEX,311,-87.2
116 | GC19-0918,789,REFLEX,318.8,-87.2
117 | GC19-0918,825,REFLEX,315.6,-87.1
118 | GC19-0919,33,REFLEX,255.6,-74.8
119 | GC19-0919,90,REFLEX,254.65,-75.2
120 | GC19-0919,135,REFLEX,253.7,-75.3
121 | GC19-0919,186,REFLEX,253.5,-75.5
122 | GC19-0919,237,REFLEX,254.9,-75.4
123 | GC19-0919,285,REFLEX,258.2,-75.4
124 | GC19-0919,339,REFLEX,256.1,-75.5
125 | GC19-0919,390,REFLEX,258.8,-75.9
126 | GC19-0919,441,REFLEX,259.4,-76.2
127 | GC19-0920,30,REFLEX,88.7,-66.2
128 | GC19-0920,81,REFLEX,90.6,-66.6
129 | GC19-0920,132,REFLEX,96.3,-65.3
130 | GC19-0920,183,REFLEX,102.6,-65.1
131 | GC19-0920,234,REFLEX,101.7,-65.3
132 | GC19-0920,270,REFLEX,101.9,-65.5
133 | GC19-0921,27,REFLEX,104.8,-55.3
134 | GC19-0921,78,REFLEX,105.7,-55.5
135 | GC19-0921,129,REFLEX,99.6,-55
136 | GC19-0921,180,REFLEX,98.5,-54.5
137 | GC19-0921,231,REFLEX,98.9,-53.6
138 | GC19-0921,243,REFLEX,99.5,-53.1
139 | GC19-0922,57,REFLEX,347.5,-63.7
140 | GC19-0922,105,REFLEX,354.4,-64.5
141 | GC19-0922,156,REFLEX,351.4,-65.2
142 | GC19-0922,207,REFLEX,356.5,-66
143 | GC19-0922,258,REFLEX,355.25,-66.4
144 | GC19-0922,309,REFLEX,354,-66.5
145 | GC19-0922,312,REFLEX,351.2,-66.6
146 | GC19-0922,363,REFLEX,354.1,-66.2
147 | GC19-0922,414,REFLEX,355.04,-65.6
148 | GC19-0923,45,REFLEX,67.5,-89
149 | GC19-0923,96,REFLEX,75.6,-89.5
150 | GC19-0923,147,REFLEX,91,-89.4
151 | GC19-0923,198,REFLEX,65.6,-89.5
152 | GC19-0923,249,REFLEX,49.2,-89.4
153 | GC19-0923,300,REFLEX,56.1,-89.5
154 | GC19-0923,351,REFLEX,72.2,-89.4
155 | GC19-0923,402,REFLEX,64.9,-89.4
156 | GC19-0923,453,REFLEX,62.8,-89.4
157 | GC19-0923,504,REFLEX,72.4,-89.4
158 | GC19-0923,555,REFLEX,70.1,-89.3
159 | GC19-0923,606,REFLEX,66.6,-89.2
160 | GC19-0923,657,REFLEX,78.5,-89.3
161 | GC19-0923,690,REFLEX,79.4,-89.3
162 | GC19-0924,30,REFLEX,112.4,-54.3
163 | GC19-0924,81,REFLEX,112.1,-55.2
164 | GC19-0924,132,REFLEX,112.4,-56
165 | GC19-0924,183,REFLEX,113.05,-56.7
166 | GC19-0924,213,REFLEX,113.7,-56.8
167 | GC19-0924,285,REFLEX,116.2,-56.3
168 | GC19-0924,336,REFLEX,115.6,-55.4
169 | GC19-0924,387,REFLEX,114.3,-54.7
170 | GC19-0924,411,REFLEX,115.2,-54.1
171 | GC19-0924,462,REFLEX,119.8,-52.9
172 | GC19-0924,510,REFLEX,121,-51.5
173 | GC19-0925,30,REFLEX,355,-61.2
174 | GC19-0925,81,REFLEX,355.75,-61.6
175 | GC19-0925,132,REFLEX,356.5,-62.1
176 | GC19-0925,183,REFLEX,350.1,-63
177 | GC19-0925,234,REFLEX,348.5,-63.1
178 | GC19-0925,285,REFLEX,349.3,-63.3
179 | GC19-0925,327,REFLEX,348.8,-63.4
180 | GC19-0926,36,REFLEX,340.9,-86.8
181 | GC19-0926,87,REFLEX,345.4,-87.2
182 | GC19-0926,141,REFLEX,0.2,-87.2
183 | GC19-0926,192,REFLEX,8.6,-87.4
184 | GC19-0926,243,REFLEX,7.5,-87.6
185 | GC19-0926,294,REFLEX,11.7,-87.5
186 | GC19-0926,345,REFLEX,12.3,-87.6
187 | GC19-0926,396,REFLEX,8.3,-87.2
188 | GC19-0926,447,REFLEX,15.5,-87.4
189 | GC19-0926,498,REFLEX,17.5,-87.4
190 | GC19-0926,549,REFLEX,23.3,-87.3
191 | GC19-0926,600,REFLEX,26.3,-87.2
192 | GC19-0926,651,REFLEX,24.4,-87.2
193 | GC19-0927,33,REFLEX,103.5,-78.3
194 | GC19-0927,84,REFLEX,104.7,-77.8
195 | GC19-0927,135,REFLEX,105.5,-78
196 | GC19-0927,186,REFLEX,107.8,-78.3
197 | GC19-0927,237,REFLEX,111.8,-78.1
198 | GC19-0927,288,REFLEX,114.8,-77.8
199 | GC19-0927,339,REFLEX,118.3,-77.6
200 | GC19-0928,28,REFLEX,97,-65.7
201 | GC19-0928,79.5,REFLEX,93.4,-66.9
202 | GC19-0928,130.5,REFLEX,94.4,-68.3
203 | GC19-0928,180,REFLEX,94.7,-68.6
204 | GC19-0928,231,REFLEX,93.8,-69.6
205 | GC19-0928,282,REFLEX,99,-69.5
206 | GC19-0928,333,REFLEX,101.15,-69.3
207 | GC19-0928,384,REFLEX,103.3,-69.1
208 | GC19-0928,435,REFLEX,102.45,-68.6
209 | GC19-0928,459,REFLEX,101.6,-68.4
210 | GC19-0929,0,COLLAR,128,-75
211 | GC19-0929A,16.5,REFLEX,120.6,-75.4
212 | GC19-0929A,55.2,REFLEX,121.8,-75.6
213 | GC19-0929A,108,REFLEX,123,-76.4
214 | GC19-0929A,189,REFLEX,120.9,-76.9
215 | GC19-0929A,210,REFLEX,123.6,-76.2
216 | GC19-0929A,261,REFLEX,122.1,-76
217 | GC19-0929A,312,REFLEX,116.1,-75.2
218 | GC19-0930,28.5,REFLEX,1.3,-67.3
219 | GC19-0930,79.5,REFLEX,358,-67.8
220 | GC19-0930,130,REFLEX,355.8,-67.7
221 | GC19-0930,180,REFLEX,353.05,-67.3
222 | GC19-0931,46.5,REFLEX,192.6,-81.9
223 | GC19-0931,97.5,REFLEX,194.6,-81.8
224 | GC19-0931,148.5,REFLEX,198.3,-81.8
225 | GC19-0931,201,REFLEX,197.2,-82
226 | GC19-0931,252,REFLEX,199.4,-82.1
227 | GC19-0931,303,REFLEX,202.3,-82
228 | GC19-0931,354,REFLEX,205.9,-81.6
229 | GC19-0931,405,REFLEX,208.1,-81.6
230 | GC19-0931,456,REFLEX,208.5,-81.6
231 | GC19-0931,494,REFLEX,209.5,-81.5
232 | GC19-0932,67.5,REFLEX,356.7,-64.2
233 | GC19-0932,118.5,REFLEX,355.1,-65.8
234 | GC19-0932,168,REFLEX,355.2,-66.6
235 | GC19-0932,219,REFLEX,355.8,-66.7
236 | GC19-0932,270,REFLEX,351,-67.1
237 | GC19-0932,321,REFLEX,347.4,-67.6
238 | GC19-0932,372,REFLEX,354.7,-67.6
239 | GC19-0932,423,REFLEX,356.6,-67.4
240 | GC19-0932,474,REFLEX,359.7,-67.1
241 | GC19-0932,525,REFLEX,358,-66.7
242 | GC19-0933,33,REFLEX,118.9,-49.2
243 | GC19-0933,88.5,REFLEX,122.5,-49.8
244 | GC19-0933,138,REFLEX,122.1,-50.4
245 | GC19-0933,189,REFLEX,119.1,-50
246 | GC19-0933,240,REFLEX,119.4,-50.1
247 | GC19-0933,291,REFLEX,121,-49.6
248 | GC19-0933,342,REFLEX,126.1,-48.5
249 | GC19-0933,393,REFLEX,128.3,-47.9
250 | GC19-0933,444,REFLEX,134.4,-45.8
251 | GC19-0933,489,REFLEX,129.1,-44.4
252 | GC19-0934,39,REFLEX,39.5,-60.3
253 | GC19-0934,90,REFLEX,40,-61.2
254 | GC19-0934,141,REFLEX,40.5,-61.1
255 | GC19-0934,192,REFLEX,41.4,-61.5
256 | GC19-0934,243,REFLEX,45.1,-58.5
257 | GC19-0934,294,REFLEX,45.7,-56.8
258 | GC19-0934,345,REFLEX,47.3,-56.1
259 | GC19-0934,396,REFLEX,48.3,-53.6
260 | GC19-0935,57,REFLEX,260,-88.4
261 | GC19-0935,108,REFLEX,246.3,-88.4
262 | GC19-0935,159,REFLEX,240.3,-88.4
263 | GC19-0935,210,REFLEX,240.8,-88.1
264 | GC19-0935,261,REFLEX,242.7,-87.9
265 | GC19-0935,312,REFLEX,239.6,-87.9
266 | GC19-0935,363,REFLEX,243.1,-88
267 | GC19-0935,414,REFLEX,236,-88.1
268 | GC19-0935,450,REFLEX,238.9,-87.9
269 | GC19-0935,501,REFLEX,238.1,-88.3
270 | GC19-0935,519,REFLEX,239.5,-88.3
271 | GC19-0936,57,REFLEX,160.16,-60.7
272 | GC19-0936,174,REFLEX,160.14,-60.36
273 | GC19-0936,228,REFLEX,160.12,-59.3
274 | GC19-0936,279,REFLEX,161.54,-58.49
275 | GC19-0936,330,REFLEX,163.72,-57.28
276 | GC19-0937,49.5,REFLEX,108.7,-53.6
277 | GC19-0937,100.5,REFLEX,105.2,-53.5
278 | GC19-0937,150,REFLEX,105.2,-53.7
279 | GC19-0937,201,REFLEX,108.8,-53.8
280 | GC19-0937,252,REFLEX,111.6,-53.9
281 | GC19-0937,303,REFLEX,114.6,-54
282 | GC19-0937,354,REFLEX,117.6,-54.2
283 | GC19-0938,63,REFLEX,164.18,-78.36
284 | GC19-0938,114,REFLEX,166.66,-78.31
285 | GC19-0938,165,REFLEX,168.48,-77.98
286 | GC19-0938,216,REFLEX,168.65,-78.08
287 | GC19-0938,267,REFLEX,171.77,-77.82
288 | GC19-0939,21,REFLEX,266.8,-59.7
289 | GC19-0939,94,REFLEX,267.9,-60.7
290 | GC19-0939,144,REFLEX,267.6,-62
291 | GC19-0939,195,REFLEX,265.3,-62.3
292 | GC19-0939,246,REFLEX,269.2,-62.3
293 | GC19-0939,297,REFLEX,270.9,-61.4
294 | GC19-0939,348,REFLEX,272.55,-60
295 | GC19-0939,399,REFLEX,274.2,-58.6
296 | GC19-0939,450,REFLEX,278.6,-57.2
297 | GC19-0939,480,REFLEX,278.6,-56.3
298 | GC19-0940,66,REFLEX,71.48,-88.32
299 | GC19-0940,117,REFLEX,80.08,-88.65
300 | GC19-0940,168,REFLEX,86.27,-88.5
301 | GC19-0940,219,REFLEX,88.27,-88.45
302 | GC19-0940,270,REFLEX,86.55,-88.4
303 | GC19-0940,321,REFLEX,89.08,-88.41
304 | GC19-0940,372,REFLEX,95.89,-88.27
305 | GC19-0940,423,REFLEX,101.8,-88.16
306 | GC19-0941,54,REFLEX,202.4,-70.1
307 | GC19-0941,105,REFLEX,203.6,-69.9
308 | GC19-0941,156,REFLEX,204.9,-69.7
309 | GC19-0941,207,REFLEX,206.7,-69.1
310 | GC19-0941,250,REFLEX,207.6,-68
311 | GC19-0941,300,REFLEX,208.4,-67.1
312 | GC19-0942,18,REFLEX,177.57,-50.2
313 | GC19-0942,69,REFLEX,179.05,-50.1
314 | GC19-0942,120,REFLEX,179.93,-49.83
315 | GC19-0942,171,REFLEX,182.31,-49.69
316 | GC19-0942,222,REFLEX,184.3,-48.8
317 | GC19-0942,273,REFLEX,185.81,-48.22
318 | GC19-0942,315.9,REFLEX,179.93,-49.83
319 | GC19-0943,60,REFLEX,254,-74.1
320 | GC19-0943,111,REFLEX,253.6,-74.1
321 | GC19-0943,162,REFLEX,254.3,-74.1
322 | GC19-0943,213,REFLEX,257.4,-73.9
323 | GC19-0943,263,REFLEX,257.3,-74.1
324 | GC19-0943,315,REFLEX,265.6,-74
325 | GC19-0943,366,REFLEX,264.8,-73.4
326 | GC19-0943,417,REFLEX,267.5,-73.7
327 | GC19-0943,468,REFLEX,270.8,-73.6
328 | GC19-0943,519,REFLEX,272.9,-73
329 | GC19-0943,570,REFLEX,274.4,-72.5
330 | GC19-0943,621,REFLEX,281.4,-71.8
331 | GC19-0943,672,REFLEX,284,-71.3
332 | GC19-0944,12,REFLEX,180.95,-68.99
333 | GC19-0944,63,REFLEX,181.38,-69.12
334 | GC19-0944,114,REFLEX,182.12,-69.46
335 | GC19-0944,165,REFLEX,185.61,-69.49
336 | GC19-0944,216,REFLEX,186.87,-69.58
337 | GC19-0944,267,REFLEX,187.98,-69.64
338 | GC19-0944,318,REFLEX,189.78,-69.64
339 | GC19-0944,369,REFLEX,190.38,-68.82
340 | GC19-0944,420,REFLEX,192.47,-68.76
341 | GC19-0944,447,REFLEX,193.7,-69
342 | GC19-0945,99,REFLEX,127.3,-55.8
343 | GC19-0945,150,REFLEX,126.47,-55.2
344 | GC19-0945,201,REFLEX,125.64,-54.8
345 | GC19-0945,252,REFLEX,124.8,-54.7
346 | GC19-0945,303,REFLEX,130.2,-53.6
347 | GC19-0945,354,REFLEX,131.6,-52
348 | GC19-0945,405,REFLEX,137.3,-49.8
349 | GC19-0946,57,REFLEX,114.6,-73.8
350 | GC19-0946,108,REFLEX,111.8,-73.6
351 | GC19-0946,159,REFLEX,114.2,-73.8
352 | GC19-0946,210,REFLEX,111.6,-73.9
353 | GC19-0946,268,REFLEX,116.6,-74.2
354 | GC19-0946,312,REFLEX,116.9,-74.4
355 | GC19-0946,363,REFLEX,114.8,-74.6
356 | GC19-0946,414,REFLEX,115.4,-74.8
357 | GC19-0946,465,REFLEX,116.5,-75
358 | GC19-0946,516,REFLEX,118.1,-75.2
359 | GC19-0946,543,REFLEX,118,-75.2
360 | GC19-0947,15,REFLEX,74.3,-75.5
361 | GC19-0947,66,REFLEX,79,-75.3
362 | GC19-0947,117,REFLEX,78.2,-75.6
363 | GC19-0947,168,REFLEX,80.8,-76
364 | GC19-0947,219,REFLEX,83.1,-76.1
365 | GC19-0947,270,REFLEX,84.4,-76.5
366 | GC19-0947,321,REFLEX,85.8,-76.2
367 | GC19-0947,372,REFLEX,87.7,-76
368 | GC19-0947,423,REFLEX,89,-76
369 | GC19-0947,474,REFLEX,86.8,-76.2
370 | GC19-0947,525,REFLEX,88.4,-76.3
371 | GC19-0947,575,REFLEX,91.5,-76
372 | GC19-0947,627,REFLEX,97.9,-75.9
373 | GC19-0947,678,REFLEX,101.5,-76
374 | GC19-0947,729,REFLEX,101.6,-75.9
375 | GC19-0948,24,REFLEX,244.3,-80.2
376 | GC19-0948,75,REFLEX,246.2,-80.4
377 | GC19-0948,126,REFLEX,243.4,-80.5
378 | GC19-0948,177,REFLEX,243.7,-80.7
379 | GC19-0948,228,REFLEX,245.6,-80.6
380 | GC19-0948,279,REFLEX,245.7,-80.6
381 | GC19-0948,330,REFLEX,247.3,-80.5
382 | GC19-0948,381,REFLEX,252.7,-80.4
383 | GC19-0948,432,REFLEX,255.1,-80.3
384 | GC19-0948,483,REFLEX,257,-80.3
385 | GC19-0948,534,REFLEX,260.3,-79.8
386 | GC19-0948,585,REFLEX,259,-80
387 | GC19-0948,630,REFLEX,261.5,-80.1
--------------------------------------------------------------------------------
/Example_Data/Galore_Creek/GCMC_2019_SpecificGravity.csv:
--------------------------------------------------------------------------------
1 | HOLEID,GEOLFROM,GEOLTO,SG_MEASUREMENT
2 | GC19-0909,88.3,88.47,2.46
3 | GC19-0909,120.1,120.3,2.32
4 | GC19-0909,135.9,136.1,2.7
5 | GC19-0909,182.75,182.95,2.68
6 | GC19-0909,233,233.14,2.71
7 | GC19-0909,280.72,280.92,2.62
8 | GC19-0909,330.8,331,2.65
9 | GC19-0909,391.56,391.73,2.66
10 | GC19-0909,446,446.21,2.71
11 | GC19-0909,473.66,473.86,2.73
12 | GC19-0909,548.82,549,2.74
13 | GC19-0909,600.36,600.56,2.77
14 | GC19-0909,652.3,652.47,2.72
15 | GC19-0909,701.55,701.74,2.68
16 | GC19-0909,751.8,751.95,2.67
17 | GC19-0909,833.83,834,2.82
18 | GC19-0909,879.33,879.5,2.67
19 | GC19-0910,109.25,109.42,2.7
20 | GC19-0910,161.45,161.7,2.59
21 | GC19-0910,244,244.22,3.11
22 | GC19-0910,321,321.17,2.63
23 | GC19-0910,344.82,345,2.76
24 | GC19-0910,390.72,390.9,2.91
25 | GC19-0910,409,409.17,2.73
26 | GC19-0910,458.27,458.45,2.73
27 | GC19-0910,472.35,472.54,2.77
28 | GC19-0910,499.76,499.95,2.87
29 | GC19-0910,506.14,506.3,3.09
30 | GC19-0911,22.06,22.23,2.65
31 | GC19-0911,217.2,217.35,2.71
32 | GC19-0911,258,258.14,2.68
33 | GC19-0911,304.75,304.89,2.65
34 | GC19-0911,346.85,347,2.73
35 | GC19-0912,229.4,229.57,2.65
36 | GC19-0912,307.98,308.15,3.02
37 | GC19-0912,375,375.13,2.69
38 | GC19-0912,434.74,434.88,2.64
39 | GC19-0912,471,471.15,2.63
40 | GC19-0912,509.84,510,2.65
41 | GC19-0913,43.55,43.7,2.65
42 | GC19-0913,53,53.1,2.64
43 | GC19-0913,223.55,223.7,2.74
44 | GC19-0913,273.1,273.25,2.73
45 | GC19-0913,283.7,283.83,2.98
46 | GC19-0913,287.85,288,2.68
47 | GC19-0913,323.5,323.65,2.79
48 | GC19-0914,134.5,134.65,2.59
49 | GC19-0914,192,192.15,2.72
50 | GC19-0914,219,219.3,2.82
51 | GC19-0914,239.65,239.8,2.58
52 | GC19-0914,266.75,267,2.8
53 | GC19-0914,282.13,282.3,2.92
54 | GC19-0914,441,441.19,2.87
55 | GC19-0914,486,486.17,2.65
56 | GC19-0915,155.86,156,2.55
57 | GC19-0916,99.25,99.45,2.36
58 | GC19-0916,147.47,147.7,2.44
59 | GC19-0916,205.2,205.38,2.53
60 | GC19-0916,306.91,307.12,2.65
61 | GC19-0916,350.75,350.85,2.66
62 | GC19-0916,501,501.16,2.7
63 | GC19-0916,551.84,552,2.6
64 | GC19-0916,559.73,559.9,2.7
65 | GC19-0916,694.77,694.92,2.07
66 | GC19-0916,740.7,740.85,2.81
67 | GC19-0916,763.6,763.75,2.77
68 | GC19-0916,827.62,827.8,2.67
69 | GC19-0916,869.54,869.7,2.84
70 | GC19-0917,62.85,63,2.53
71 | GC19-0917,99.19,99.35,2.66
72 | GC19-0917,150.95,151.12,2.72
73 | GC19-0917,186,186.14,2.62
74 | GC19-0917,243,243.1,2.7
75 | GC19-0917,282.6,282.85,2.63
76 | GC19-0917,321.5,321.63,3.01
77 | GC19-0917,356.89,357,2.79
78 | GC19-0917,381.52,381.65,2.9
79 | GC19-0917,422.16,422.32,2.69
80 | GC19-0917,473.47,473.62,2.67
81 | GC19-0917,522.8,522.9,2.62
82 | GC19-0918,150.82,151,2.44
83 | GC19-0918,197.82,198,2.64
84 | GC19-0918,245.48,245.67,2.92
85 | GC19-0918,255.5,255.63,2.68
86 | GC19-0918,300,300.16,2.83
87 | GC19-0918,339,339.15,1.82
88 | GC19-0918,356.13,356.29,2.71
89 | GC19-0918,407.48,407.62,3.1
90 | GC19-0918,444.15,444.28,2.69
91 | GC19-0918,488.53,488.67,2.66
92 | GC19-0918,525.16,525.31,2.56
93 | GC19-0918,561.41,561.56,2.63
94 | GC19-0918,599.06,599.26,2.7
95 | GC19-0918,649.2,649.4,2.76
96 | GC19-0918,700.48,700.6,2.94
97 | GC19-0918,755.83,756,2.7
98 | GC19-0918,802.8,802.97,2.79
99 | GC19-0919,49.1,49.23,4.05
100 | GC19-0919,123,123.15,2.68
101 | GC19-0919,129,129.12,2.68
102 | GC19-0919,149.15,149.3,2.6
103 | GC19-0919,163.53,163.63,2.69
104 | GC19-0919,201,201.18,1.96
105 | GC19-0919,216.85,217,2.74
106 | GC19-0919,249,249.15,2.68
107 | GC19-0919,300.45,300.6,2.72
108 | GC19-0919,350.28,350.46,2.74
109 | GC19-0919,398.7,398.82,2.75
110 | GC19-0919,446.14,446.34,2.7
111 | GC19-0920,141.7,141.85,2.61
112 | GC19-0920,170.84,170.92,2.76
113 | GC19-0920,190.35,190.47,2.68
114 | GC19-0920,197.9,198,2.72
115 | GC19-0920,205,205.12,2.98
116 | GC19-0920,214.26,214.43,2.71
117 | GC19-0920,219.46,219.6,2.59
118 | GC19-0920,239.8,239.93,2.61
119 | GC19-0920,264.57,264.7,2.68
120 | GC19-0920,269.79,269.91,2.73
121 | GC19-0921,46.95,47.06,2.59
122 | GC19-0921,55.08,55.23,2.62
123 | GC19-0921,59.1,59.2,2.66
124 | GC19-0921,61.6,61.72,2.63
125 | GC19-0921,76.03,76.15,2.73
126 | GC19-0921,86.56,86.68,2.73
127 | GC19-0921,131.7,131.86,2.73
128 | GC19-0921,154.86,155.02,2.66
129 | GC19-0921,166.36,166.54,2.76
130 | GC19-0921,178.94,179.06,2.68
131 | GC19-0921,222,222.15,2.71
132 | GC19-0921,232.65,232.78,2.69
133 | GC19-0921,240.87,241.02,2.79
134 | GC19-0921,242.6,242.75,2.68
135 | GC19-0922,142.1,142.25,2.68
136 | GC19-0922,197.54,197.73,2.66
137 | GC19-0922,251.25,251.4,2.57
138 | GC19-0922,299.38,299.5,2.78
139 | GC19-0922,347.8,348,2.72
140 | GC19-0922,400.5,400.65,2.72
141 | GC19-0923,74.35,74.45,2.58
142 | GC19-0923,99.86,99.99,2.73
143 | GC19-0923,144,144.12,2.68
144 | GC19-0923,175.16,175.29,2.68
145 | GC19-0923,242.4,242.5,2.93
146 | GC19-0923,288.45,288.57,2.66
147 | GC19-0923,329.75,329.88,2.64
148 | GC19-0923,380.55,380.68,3.07
149 | GC19-0923,453.2,453.35,3.17
150 | GC19-0923,499.26,499.41,2.69
151 | GC19-0923,536.85,537,2.71
152 | GC19-0923,544.1,544.25,2.69
153 | GC19-0923,565.32,565.5,2.85
154 | GC19-0923,607.55,607.67,2.69
155 | GC19-0923,654,654.1,2.67
156 | GC19-0924,119.6,119.75,2.62
157 | GC19-0924,231.1,231.27,2.69
158 | GC19-0924,235.03,235.17,2.68
159 | GC19-0924,251.83,252,2.79
160 | GC19-0924,305.1,305.2,2.74
161 | GC19-0924,332.12,332.24,2.74
162 | GC19-0924,347.35,347.5,2.71
163 | GC19-0924,364.05,364.2,2.7
164 | GC19-0924,399.6,399.85,2.7
165 | GC19-0924,423.4,423.55,2.63
166 | GC19-0924,438.35,438.45,2.76
167 | GC19-0924,486.65,486.8,2.89
168 | GC19-0924,498.48,498.58,2.68
169 | GC19-0925,101.4,101.55,2.67
170 | GC19-0925,149.67,149.85,2.66
171 | GC19-0925,200.41,200.54,2.67
172 | GC19-0925,250.45,250.61,2.72
173 | GC19-0925,301.42,301.58,2.77
174 | GC19-0926,149.84,150,2.57
175 | GC19-0926,199.55,199.7,2.79
176 | GC19-0926,251.71,251.9,2.74
177 | GC19-0926,301.28,301.47,2.73
178 | GC19-0926,348.37,348.54,2.74
179 | GC19-0926,399.13,399.3,2.64
180 | GC19-0926,450,450.15,2.6
181 | GC19-0926,503,503.15,2.61
182 | GC19-0926,552.19,552.31,2.68
183 | GC19-0926,598.15,598.3,2.68
184 | GC19-0926,641.8,642,2.77
185 | GC19-0927,95,95.15,2.66
186 | GC19-0927,150,150.2,2.58
187 | GC19-0927,203.1,203.23,
188 | GC19-0927,254.8,254.93,2.65
189 | GC19-0927,295.6,295.74,2.66
190 | GC19-0927,308.45,308.63,2.69
191 | GC19-0928,233.42,233.56,2.68
192 | GC19-0928,281.75,281.9,2.64
193 | GC19-0928,295.7,295.85,2.73
194 | GC19-0928,338.75,338.9,2.8
195 | GC19-0928,382.8,382.92,2.78
196 | GC19-0928,444,444.1,2.64
197 | GC19-0929A,99.6,99.78,2.68
198 | GC19-0929A,150.56,150.72,2.72
199 | GC19-0929A,200.55,200.7,2.63
200 | GC19-0929A,249.47,249.62,2.61
201 | GC19-0929A,303.4,303.55,2.53
202 | GC19-0930,166.65,166.8,2.67
203 | GC19-0931,66.24,66.39,2.9
204 | GC19-0931,110.87,111,2.57
205 | GC19-0931,140.67,140.8,2.52
206 | GC19-0931,185.67,185.79,2.68
207 | GC19-0931,237.06,237.18,2.81
208 | GC19-0931,283.84,283.96,2.7
209 | GC19-0931,335.88,336,2.75
210 | GC19-0931,383.6,383.77,2.93
211 | GC19-0931,431.84,432,2.92
212 | GC19-0931,483,483.15,2.72
213 | GC19-0932,174,174.11,2.58
214 | GC19-0932,226.65,226.78,2.8
215 | GC19-0932,312.64,312.78,2.82
216 | GC19-0932,370.21,370.36,2.77
217 | GC19-0932,410,410.15,2.77
218 | GC19-0932,463.33,463.46,2.76
219 | GC19-0932,510,510.12,2.81
220 | GC19-0933,115,115.11,2.62
221 | GC19-0933,125.15,125.27,2.62
222 | GC19-0933,140.68,140.8,2.7
223 | GC19-0933,212.1,212.2,2.75
224 | GC19-0933,244.45,244.59,2.72
225 | GC19-0933,361.64,361.78,2.71
226 | GC19-0933,403.24,403.37,2.73
227 | GC19-0934,44.86,45,2.74
228 | GC19-0934,94.25,94.4,2.54
229 | GC19-0934,124,124.15,2.57
230 | GC19-0934,169.88,170.02,2.76
231 | GC19-0934,231,231.15,2.77
232 | GC19-0934,272.86,273,2.71
233 | GC19-0934,330.92,331.05,2.75
234 | GC19-0934,371.09,371.22,2.72
235 | GC19-0935,139.11,139.25,2.57
236 | GC19-0935,188.14,188.26,2.75
237 | GC19-0935,237.57,237.71,2.87
238 | GC19-0935,287.78,287.96,2.8
239 | GC19-0935,336.54,336.64,2.74
240 | GC19-0935,388.45,388.6,2.73
241 | GC19-0935,444.45,444.6,2.79
242 | GC19-0935,488.9,489,2.85
243 | GC19-0936,135.57,135.72,2.55
244 | GC19-0936,180,180.17,2.65
245 | GC19-0936,228.48,228.62,2.63
246 | GC19-0936,282.76,282.9,2.68
247 | GC19-0936,319.71,319.88,2.73
248 | GC19-0937,212.24,212.4,2.67
249 | GC19-0937,251.34,251.53,2.72
250 | GC19-0937,301,301.17,2.73
251 | GC19-0937,353.15,353.35,2.72
252 | GC19-0938,107.82,108,2.55
253 | GC19-0938,159.75,159.86,2.59
254 | GC19-0938,207.2,207.3,2.8
255 | GC19-0938,265.36,265.49,2.72
256 | GC19-0939,133.65,133.8,2.76
257 | GC19-0939,183.35,183.48,2.77
258 | GC19-0939,252.85,253,2.66
259 | GC19-0939,283.05,283.2,2.69
260 | GC19-0939,333.67,333.8,2.83
261 | GC19-0939,381.85,382,2.83
262 | GC19-0939,421.3,421.45,2.67
263 | GC19-0939,473.9,474,2.65
264 | GC19-0940,75,75.12,2.55
265 | GC19-0940,129.98,130.1,2.6
266 | GC19-0940,182.11,182.22,2.73
267 | GC19-0940,232.07,232.19,2.66
268 | GC19-0940,277.68,277.71,2.71
269 | GC19-0940,336.09,336.24,2.76
270 | GC19-0940,382.25,382.37,2.78
271 | GC19-0940,419.46,419.58,2.81
272 | GC19-0941,107.88,108,2.68
273 | GC19-0941,158.41,158.55,2.52
274 | GC19-0941,177.38,177.52,2.58
275 | GC19-0941,228.33,228.47,2.68
276 | GC19-0941,262.34,262.48,2.78
277 | GC19-0941,292.4,292.57,2.74
278 | GC19-0942,50.36,50.5,2.79
279 | GC19-0942,111.9,112.03,2.58
280 | GC19-0942,162.56,162.7,2.46
281 | GC19-0942,220.62,220.76,2.67
282 | GC19-0942,266.12,266.25,2.7
283 | GC19-0942,307.87,308,2.79
284 | GC19-0943,102.6,102.76,2.61
285 | GC19-0943,146.3,146.44,2.74
286 | GC19-0943,264.21,264.37,2.73
287 | GC19-0943,309.8,309.96,2.81
288 | GC19-0943,360.43,360.58,2.8
289 | GC19-0943,400.6,400.75,2.86
290 | GC19-0943,449.77,449.92,2.74
291 | GC19-0943,503.83,504,2.69
292 | GC19-0943,558,558.13,2.68
293 | GC19-0943,599.5,599.65,2.65
294 | GC19-0943,650.15,650.3,2.72
295 | GC19-0944,20.8,21,2.75
296 | GC19-0944,75.7,75.83,2.79
297 | GC19-0944,119.75,119.85,2.81
298 | GC19-0944,185.2,185.34,2.81
299 | GC19-0944,231.43,231.53,2.7
300 | GC19-0944,292.32,292.43,2.86
301 | GC19-0944,340.85,340.95,2.9
302 | GC19-0944,345.38,345.49,2.89
303 | GC19-0944,388.59,388.7,3
304 | GC19-0944,431.75,431.87,2.85
305 | GC19-0945,267.15,267.3,2.66
306 | GC19-0945,317.85,318,2.71
307 | GC19-0945,375,375.15,2.69
308 | GC19-0945,414.54,414.69,2.75
309 | GC19-0946,51.3,51.45,2.63
310 | GC19-0946,100.88,101,2.48
311 | GC19-0946,197.38,197.51,2.78
312 | GC19-0946,252,252.15,3.06
313 | GC19-0946,350.6,350.75,2.81
314 | GC19-0946,399.67,399.8,3.08
315 | GC19-0946,449.86,450,2.96
316 | GC19-0946,499.85,500,2.8
317 | GC19-0947,57,57.12,2.65
318 | GC19-0947,108.45,108.58,2.64
319 | GC19-0947,144.47,144.57,2.65
320 | GC19-0947,164.88,165,2.65
321 | GC19-0947,202.37,202.47,2.66
322 | GC19-0947,257.63,257.77,2.64
323 | GC19-0947,307.75,307.89,2.73
324 | GC19-0947,359.86,359.94,2.63
325 | GC19-0947,408,408.12,2.67
326 | GC19-0947,443.88,444,2.94
327 | GC19-0947,459.16,459.3,2.66
328 | GC19-0947,468,468.14,2.72
329 | GC19-0947,489,489.14,2.79
330 | GC19-0947,509.9,510,2.72
331 | GC19-0947,527.63,527.75,2.73
332 | GC19-0947,579.7,579.8,2.72
333 | GC19-0947,630.09,630.21,2.68
334 | GC19-0947,635.88,636,2.67
335 | GC19-0947,668.9,669,2.69
336 | GC19-0947,762.64,762.75,2.72
337 | GC19-0948,148.73,148.88,2.59
338 | GC19-0948,211.9,212,2.72
339 | GC19-0948,307.22,307.4,2.73
340 | GC19-0948,366.55,366.7,2.71
341 | GC19-0948,418.85,419.05,2.67
342 | GC19-0948,483,483.15,2.73
343 | GC19-0948,530.5,530.65,2.72
344 | GC19-0948,611.9,612,2.69
345 | SRK19-023,2,3,2.61
346 | SRK19-023,3,4,2.62
347 | SRK19-023,4,5,2.59
348 | SRK19-023,103.88,104.07,2.64
349 | SRK19-023,157.62,157.82,2.74
350 | SRK19-023,215.89,216.11,2.58
351 | SRK19-023,321.24,321.43,2.78
352 | SRK19-023,371.5,371.63,2.7
353 | SRK19-023,466.28,466.46,2.7
354 | SRK19-023,518.3,518.42,2.72
355 | SRK19-024,5.5,5.59,2.55
356 | SRK19-024,6.59,6.75,2.44
357 | SRK19-024,34.09,34.17,2.61
358 | SRK19-024,34.17,34.25,2.6
359 | SRK19-024,35.19,35.28,2.48
360 | SRK19-024,41.16,41.26,2.58
361 | SRK19-024,41.87,41.97,2.67
362 | SRK19-024,42.13,42.22,2.62
363 | SRK19-024,42.25,42.33,2.67
364 | SRK19-024,42.38,42.46,2.6
365 | SRK19-024,117.83,118,2.64
366 | SRK19-024,165.15,165.32,2.67
367 | SRK19-024,188.35,188.5,2.68
368 | SRK19-024,221.4,221.5,2.71
369 | SRK19-024,244,244.15,2.71
370 | SRK19-024,276.81,277,2.66
371 | SRK19-024,305.5,305.67,2.82
372 | SRK19-024,323.5,323.68,2.62
373 | SRK19-024,331.69,331.86,2.9
374 | SRK19-024,348.5,348.64,2.64
375 | SRK19-024,363.75,363.89,
376 | SRK19-024,391.73,391.87,2.71
377 | SRK19-024,402.88,403,2.71
378 | SRK19-024,409.48,409.64,2.61
379 | SRK19-024,426.89,427,2.55
380 | SRK19-025,7,7.14,2.74
381 | SRK19-025,7.41,7.57,2.65
382 | SRK19-025,8.16,8.27,2.81
383 | SRK19-025,29.95,30.07,2.59
384 | SRK19-025,30.07,30.16,2.57
385 | SRK19-025,100.49,100.59,2.56
386 | SRK19-025,100.93,101.05,2.79
387 | SRK19-025,101.26,101.35,2.64
388 | SRK19-025,101.35,101.43,2.64
389 | SRK19-025,469,469.17,2.67
390 | SRK19-025,517.81,518,2.72
391 | SRK19-025,551.3,551.41,2.73
392 | SRK19-025,558.06,558.23,2.59
393 | SRK19-025,598.22,598.32,2.64
394 | SRK19-026,51.9,52.08,2.57
395 | SRK19-026,52.1,52.29,2.56
396 | SRK19-026,52.79,52.86,2.47
397 | SRK19-026,68.78,68.97,2.62
398 | SRK19-026,84.19,84.33,2.64
399 | SRK19-026,112.52,112.65,2.41
400 | SRK19-026,131.25,131.4,2.65
401 | SRK19-026,139.73,139.92,2.58
402 | SRK19-026,164.25,164.44,3.1
403 | SRK19-026,206.91,207.06,2.88
404 | SRK19-026,233.4,233.54,2.78
405 | SRK19-026,261.13,261.26,2.72
406 | SRK19-026,283.43,283.62,3.03
407 | SRK19-026,323.19,323.35,2.98
408 | SRK19-026,369.48,369.62,2.94
409 | SRK19-026,419.84,420.01,2.68
410 | SRK19-027,93.04,93.17,2.69
411 | SRK19-027,119.95,120.06,2.52
412 | SRK19-027,120.65,120.8,2.54
413 | SRK19-027,145,145.15,2.62
414 | SRK19-027,168.02,168.13,2.64
415 | SRK19-027,168.63,168.73,2.63
416 | SRK19-027,168.81,168.88,2.65
417 | SRK19-027,180.51,180.63,2.13
418 | SRK19-027,212,212.11,2.57
419 | SRK19-027,216.5,216.64,2.64
420 | SRK19-027,216.91,217.06,2.65
421 | SRK19-027,222.86,223,2.67
422 | SRK19-027,234.04,234.15,2.61
423 | SRK19-027,256,256.09,2.65
424 | SRK19-027,256.59,256.72,2.63
425 | SRK19-027,256.94,257.04,2.62
426 | SRK19-027,266.9,267.02,2.59
427 | SRK19-027,271.53,271.68,2.66
428 | SRK19-027,280.78,280.91,2.83
429 | SRK19-027,310.17,310.25,2.73
430 | SRK19-027,310.51,310.59,2.59
431 | SRK19-027,310.59,310.71,2.49
432 | SRK19-027,310.71,310.81,2.59
433 | SRK19-027,335.3,335.44,2.61
434 | SRK19-027,348,348.14,2.73
435 | SRK19-027,355.46,355.61,2.7
436 | SRK19-027,374.84,374.92,2.56
437 | SRK19-027,375.19,375.26,2.53
438 | SRK19-027,375.29,375.4,2.52
439 | SRK19-027,402.78,402.92,2.75
440 | SRK19-027,449.25,449.42,4.78
441 | SRK19-028,53.65,53.77,2.24
442 | SRK19-028,54.33,54.42,2.41
443 | SRK19-028,54.45,54.56,2.38
444 | SRK19-028,83.78,83.88,2.91
445 | SRK19-028,84.03,84.1,2.78
446 | SRK19-028,112.35,112.45,2.6
447 | SRK19-028,112.8,112.88,2.6
448 | SRK19-028,120.22,120.39,2.35
449 | SRK19-028,149.6,149.75,2.5
450 | SRK19-028,160.91,161.05,2.74
451 | SRK19-028,209.28,209.43,2.54
452 | SRK19-028,239.5,239.64,2.79
453 | SRK19-028,253.77,253.9,2.76
454 | SRK19-028,299.82,300,2.72
455 | SRK19-028,340.63,340.81,2.77
456 | SRK19-028,361.11,361.3,2.73
457 | SRK19-028,394.49,394.64,2.77
458 | SRK19-028,431.32,431.5,2.7
459 | SRK19-029,735,735.2,2.98
460 | SRK19-029,783.5,783.65,2.96
461 | SRK19-029,839.8,840,2.87
462 | SRK19-030,16.06,16.3,2.44
463 | SRK19-030,16.3,16.4,2.4
464 | SRK19-030,16.6,16.74,2.55
465 | SRK19-030,74.95,75.05,2.66
466 | SRK19-030,75.23,75.31,2.67
467 | SRK19-030,75.56,75.66,2.67
468 | SRK19-030,118.2,118.25,2.52
469 | SRK19-030,118.7,118.76,2.51
470 | SRK19-030,118.76,118.82,2.54
471 | SRK19-030,179.7,179.82,2.55
472 | SRK19-030,179.89,179.97,2.51
473 | SRK19-030,219.6,219.66,2.56
474 | SRK19-030,277.85,277.92,2.63
475 | SRK19-030,277.92,277.99,2.65
476 | SRK19-030,278.1,278.17,2.65
477 | SRK19-035,20.63,20.78,2.89
478 | SRK19-035,88.7,88.85,3
479 | SRK19-035,141.9,142.05,2.82
480 | SRK19-035,191.2,191.33,2.83
481 | SRK19-035,236.2,236.34,3.03
482 | SRK19-035,286.18,286.4,2.84
483 | SRK19-035,335.35,335.5,2.83
484 | SRK19-035,382.3,382.4,2.66
485 | SRK19-035,432.86,433.03,2.87
486 | SRK19-035,496.47,496.59,2.86
487 | SRK19-035,540.07,540.16,2.87
488 | SRK19-035,581.1,581.2,2.91
489 | SRK19-035,620.04,620.2,2.83
490 | SRK19-035,667,667.14,3.06
491 | SRK19-035,724.05,724.19,2.89
492 |
--------------------------------------------------------------------------------
/DR_DC_APP/pages/03_🧭_Desurveying.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 |
3 | st.set_page_config(layout="wide")
4 |
5 | import warnings
6 |
7 | warnings.filterwarnings("ignore")
8 |
9 | col1, col2 = st.columns([4, 1])
10 |
11 | if col2.button("Reset Page"):
12 | # Clear values from *all* memoized functions:
13 | st.session_state.clear()
14 | st.rerun()
15 |
16 | col1.markdown("# 🧭 Desurveying")
17 |
18 | import pandas as pd
19 | import sys
20 | from PIL import Image
21 | import plotly.graph_objects as go
22 |
23 | sys.path.append(r"./")
24 | from desurveying import *
25 | from desurveying_utils import *
26 |
27 | from st_utils import *
28 | import numpy as np
29 |
30 | add_logo()
31 |
32 | customized_button = st.markdown(
33 | """
34 | """,
49 | unsafe_allow_html=True,
50 | )
51 |
52 | def get_best_match(
53 | list_of_options:list, # a list of strings to search through
54 | string:str, #the string that best matches
55 | )->str:
56 |
57 | if len(get_close_matches_icase(string, list_of_options)) > 0:
58 | best_match = str(list_of_options[(list_of_options).index(get_close_matches_icase(string, list_of_options)[0])])
59 | else:
60 | best_match = str(list_of_options[0])
61 |
62 | return best_match
63 |
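# A minimal sketch of how get_best_match behaves, assuming hypothetical column
# names and that get_close_matches_icase (from st_utils) matches case-insensitively:
#
#     cols = ["HoleID", "From", "To"]
#     get_best_match(cols, "FROM")  # -> "From"
#     get_best_match(cols, "EAST")  # -> "HoleID" (no close match, falls back to cols[0])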
64 | def rerun_page():
65 | st.rerun()
66 |
67 | st.sidebar.markdown("*This web application proudly brought to you by **[Datarock](https://www.datarock.com.au)***")
68 |
69 | st.markdown(
70 | """
71 | This section of the app performs desurveying on drillhole data.
72 | Desurveying is the process of converting drillhole collar and survey information into downhole X, Y, Z geographical locations.
73 | It requires two files to run: firstly, a drillhole collar file containing the Hole ID, Easting, Northing, Relative Sea Level (RL) and Total Depth (TD); and secondly, a survey file containing the Hole ID, Survey Depth, Azimuth and Dip/Inclination.
74 | Desurveying is achieved by performing the following steps.
75 |
76 | ###### 1. Selecting the Interval Option to Desurvey To
77 | - This will be the interval that you desurvey. It can either be an interval set from an existing dataset (i.e. lithology or assay data) or a uniform interval of your choosing (1m, 2m, 0.5m etc.) starting from 0m. If you are also compositing data, it is recommended that you composite first and then desurvey the composited dataset.
78 |
79 | ###### 2. Select the Collar and Survey Data
80 | - Here the user selects the collar and survey .csv files and assigns the correct column names from the drop downs for both files.
81 |
82 | ###### 3. Select the Desurveying Method
83 | - There are two methods to choose from when desurveying drillhole data: Minimum Curvature (recommended) or the Tangential method.
84 |
85 | """
86 | )
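# For reference, the two expected inputs. The survey headers below are taken from
# the bundled Example_Data/Galore_Creek/GCMC_2019_Survey.csv; the collar headers
# are illustrative assumptions only:
#
#     collar file: HOLEID, EAST, NORTH, RL, TD            (one row per hole)
#     survey file: HOLEID, DEPTH, SURVTYPE, AZIMUTH, DIP  (one row per downhole reading)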
87 |
88 | ######################################
89 | # Selecting Interval to join to
90 | st.markdown("#### 1. Selecting Interval to Desurvey")
91 | st.markdown(
92 |     "There are two options to choose from when desurveying data: you can either desurvey to an existing interval (such as lithology or assay intervals from an existing table) or create your own standard interval across the dataset (e.g. 1m)"
93 | )
94 | des_int = st.radio("Select Interval Option", ("Standard Interval", "Existing Interval"))
95 |
96 | if des_int == "Existing Interval":
97 | interval_file = st.file_uploader("Choose a csv file with interval data")
98 |
99 | # Stop if file not selected
100 | if not interval_file:
101 | st.stop()
102 |
103 | if "int_info" not in st.session_state:
104 | st.session_state["int_info"] = []
105 |
106 | # Read in file and show dataframe
107 | try:
108 | dfi = pd.read_csv(interval_file)
109 |     except UnicodeDecodeError:
110 | dfi = pd.read_csv(interval_file, encoding="cp1252")
111 | st.dataframe(dfi.head())
112 |
113 | st.markdown("Select the HoleID, From and To columns from the existing interval table")
114 | form_name = 'Select_Interval_Data'
115 | with st.form(form_name, clear_on_submit=False):
116 | container = st.container()
117 |
118 | if f'session_state_besthole_i_des' not in st.session_state:
119 | st.session_state[f'session_state_besthole_i_des'] = get_best_match(list(dfi.columns), 'HOLE')
120 | if f'session_state_bestfro_i_des' not in st.session_state:
121 | st.session_state[f'session_state_bestfro_i_des'] = get_best_match(list(dfi.columns), 'FROM')
122 | if f'session_state_bestto_i_des' not in st.session_state:
123 | st.session_state[f'session_state_bestto_i_des'] = get_best_match(list(dfi.columns), 'TO')
124 | if f'Inty_Check_des' not in st.session_state:
125 | st.session_state['Inty_Check_des'] = 0
126 |
127 |
128 | iHOLEID = st.selectbox("Select Interval HOLEID", list(dfi.columns), index=list(dfi.columns).index(st.session_state[f'session_state_besthole_i_des']))
129 | iFROM = st.selectbox("Select Interval From Depth", list(dfi.columns), index=list(dfi.columns).index(st.session_state[f'session_state_bestfro_i_des']))
130 | iTO = st.selectbox("Select Interval To Depth", list(dfi.columns), index=list(dfi.columns).index(st.session_state[f'session_state_bestto_i_des']))
131 |
132 | # If button selected
133 | if st.form_submit_button("Submit", on_click=rerun_page):
134 | st.session_state[f'session_state_besthole_i_des'] = iHOLEID
135 | st.session_state[f'session_state_bestfro_i_des'] = iFROM
136 | st.session_state[f'session_state_bestto_i_des'] = iTO
137 | st.session_state['Inty_Check_des'] = 1
138 |
139 | st.write("Submitted!")
140 |
141 | # Renaming columns for holeid, from and to
142 | dfi = dfi.rename(columns={st.session_state[f'session_state_besthole_i_des']: "HOLEID", st.session_state[f'session_state_bestfro_i_des']: "FROM", st.session_state[f'session_state_bestto_i_des']: "TO"})
143 |
144 | if st.session_state["Inty_Check_des"] == 0:
145 | st.stop()
146 |
147 | # If standard interval get interval length
148 | elif des_int == "Standard Interval":
149 |     st.markdown("Select the interval you wish to have the data desurveyed to (m):")
150 |
151 | if 'session_state_interval2comp_des' not in st.session_state:
152 | st.session_state['session_state_interval2comp_des'] = 0
153 |
154 | form_name = 'Select_Interval'
155 | with st.form(form_name, clear_on_submit=False):
156 | container = st.container()
157 | interval = container.text_input("Insert an interval", value=str(st.session_state['session_state_interval2comp_des']))
158 | interval = float(interval)
159 |
160 | # If button selected
161 | if st.form_submit_button("Submit", on_click=rerun_page):
162 | st.session_state['session_state_interval2comp_des'] = interval
163 | st.write("Submitted!")
164 |
165 | if st.session_state["session_state_interval2comp_des"] == 0:
166 | st.stop()
167 |
168 | ###############################################
169 | # COLLAR AND SURVEY
170 | st.markdown("#### 2. Selecting Collar and Survey Data")
171 | st.markdown("Enter the collar and survey csv files.")
172 |
173 | # Splitting into columns
174 | col1, col2 = st.columns(2)
175 |
176 | # Uploading files
177 | collar_file = col1.file_uploader("Choose a Collar File")
178 | survey_file = col2.file_uploader("Choose a Survey File")
179 |
180 | if not collar_file or not survey_file:
181 | st.stop()
182 |
183 | # Read csvs
184 | try:
185 | cdf = pd.read_csv(collar_file)
186 | sdf = pd.read_csv(survey_file)
187 | except UnicodeDecodeError:
188 | cdf = pd.read_csv(collar_file, encoding="cp1252")
189 | sdf = pd.read_csv(survey_file, encoding="cp1252")
190 |
191 | if "col_info" not in st.session_state:
192 | st.session_state["col_info"] = []
193 |
194 | with st.form("Collar Information Selection"):
195 | st.markdown("###### Select all the appropriate columns from the collar and survey table.")
196 | col1, col2 = st.columns(2)
197 |
198 | cHOLEIDs = cdf.columns
199 | if len(get_close_matches_icase("HOLE", cHOLEIDs)) > 0:
200 | best_match = list(cHOLEIDs).index(get_close_matches_icase("HOLE", cHOLEIDs)[0])
201 | else:
202 | best_match = 0
203 | cHOLEID = col1.selectbox("Select Collar Hole/Collar ID", cHOLEIDs, best_match)
204 |
205 | EASTs = cdf.columns
206 | if len(get_close_matches_icase("EAST", EASTs)) > 0:
207 | best_match = list(EASTs).index(get_close_matches_icase("EAST", EASTs)[0])
208 | else:
209 | best_match = 0
210 | EAST = col1.selectbox("Select Collar Easting", EASTs, best_match)
211 |
212 | NORTHs = cdf.columns
213 | if len(get_close_matches_icase("NORTH", NORTHs)) > 0:
214 | best_match = list(NORTHs).index(get_close_matches_icase("NORTH", NORTHs)[0])
215 | else:
216 | best_match = 0
217 | NORTH = col1.selectbox("Select Collar Northing", NORTHs, best_match)
218 |
219 | RLs = cdf.columns
220 | if len(get_close_matches_icase("RL", RLs)) > 0:
221 | best_match = list(RLs).index(get_close_matches_icase("RL", RLs)[0])
222 | else:
223 | best_match = 0
224 | RL = col1.selectbox("Select Collar Relative Sea Level", RLs, best_match)
225 |
226 | TDs = cdf.columns
227 | if len(get_close_matches_icase("DEPTH", TDs)) > 0:
228 | best_match = list(TDs).index(get_close_matches_icase("DEPTH", TDs)[0])
229 | else:
230 | best_match = 0
231 | TD = col1.selectbox("Select Collar Total Depth", TDs, best_match)
232 |
233 | sHOLEIDs = sdf.columns
234 | if len(get_close_matches_icase("HOLE", sHOLEIDs)) > 0:
235 | best_match = list(sHOLEIDs).index(get_close_matches_icase("HOLE", sHOLEIDs)[0])
236 | else:
237 | best_match = 0
238 | sHOLEID = col2.selectbox("Select Survey Hole/Collar ID", sHOLEIDs, best_match)
239 |
240 | DEPTHs = sdf.columns
241 | if len(get_close_matches_icase("DEPTH", DEPTHs)) > 0:
242 | best_match = list(DEPTHs).index(get_close_matches_icase("DEPTH", DEPTHs)[0])
243 | else:
244 | best_match = 0
245 | DEPTH = col2.selectbox("Select Survey Depth", DEPTHs, best_match)
246 |
247 | AZIMs = sdf.columns
248 | if len(get_close_matches_icase("AZI", AZIMs)) > 0:
249 | best_match = list(AZIMs).index(get_close_matches_icase("AZI", AZIMs)[0])
250 | else:
251 | best_match = 0
252 | AZIM = col2.selectbox("Select Survey Azimuth", AZIMs, best_match)
253 |
254 | DIPs = sdf.columns
255 | if len(get_close_matches_icase("DIP", DIPs)) > 0:
256 | best_match = list(DIPs).index(get_close_matches_icase("DIP", DIPs)[0])
257 | else:
258 | best_match = 0
259 | DIP = col2.selectbox("Select Survey Dip", DIPs, best_match)
260 |
261 | if st.form_submit_button("Submit"):
262 |         st.session_state["col_info"] = [cHOLEID, EAST, NORTH, RL, TD, sHOLEID, DEPTH, AZIM, DIP]
263 | st.write("Submitted!")
264 | st.rerun()
265 |
266 | if len(st.session_state["col_info"]) == 0:
267 | st.stop()
268 |
269 | # Rename columns
270 | sdf = sdf.rename(columns={sHOLEID: "HOLEID", DEPTH: "DEPTH", AZIM: "AZIM", DIP: "DIP"})
271 | cdf = cdf.rename(columns={cHOLEID: "HOLEID", EAST: "EAST", NORTH: "NORTH", RL: "RL", TD: "TD"})
272 |
273 | check_streamlit_desurvey_holes(cdf, sdf)
274 |
275 | # Merging Data
276 | adf = pd.merge(
277 | sdf[["HOLEID", "DEPTH", "AZIM", "DIP"]],
278 | cdf[["HOLEID", "EAST", "NORTH", "RL", "TD"]],
279 | on="HOLEID",
280 | )
281 |
282 | missing_data_holes = list(adf[adf.isna().any(axis=1)]["HOLEID"].unique())
283 |
284 | if len(missing_data_holes) > 0:
285 | st.write(
286 | """
287 | ##### Missing Data
288 | """
289 | )
290 | misso = ""
291 | for i in missing_data_holes:
292 | misso += "- " + str(i) + "\n"
293 |
294 | st.write("The following holes contain NaN values in the collar or survey and will be removed 😭")
295 | st.markdown(misso)
296 |
297 | adf = adf[~adf.isna().any(axis=1)].reset_index(drop=True)
298 |
299 | # Unwrapping azimuth AFTER removal
300 | adf["AZIM"] = np.unwrap(adf["AZIM"], period=360)
301 |
302 | # Setting from and to column names
303 | FROM = "FROM"
304 | TO = "TO"
305 | ###################################
306 |
307 | st.markdown("#### 3. Desurveying Method")
308 | st.markdown(
309 |     """There are two methods to choose from when desurveying drillhole data: Minimum Curvature (recommended) or the Tangential method.
310 | The Tangential method assumes that the drillhole continues in a straight line until the next survey point is recorded and adjusts its direction accordingly.
311 | The Minimum Curvature method smooths the straight line segments, giving a better approximation of how the drill trace would look in real life."""
312 | )
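# For reference, a sketch of the geometry the two methods use, mirroring the
# implementations in desurveying_utils.py (theta measured from the zenith, phi the
# azimuth, both in radians, L the segment length):
#
#     tangent:            dx = L * sin(phi) * sin(theta)
#                         dy = L * cos(phi) * sin(theta)
#                         dz = L * cos(theta)
#
#     minimum curvature:  beta = arccos(cos(t2 - t1) - sin(t1) * sin(t2) * (1 - cos(p2 - p1)))
#                         RF   = (2 / beta) * tan(beta / 2)    # RF = 1 when beta = 0
#                         dx   = (L / 2) * (sin(t1) * sin(p1) + sin(t2) * sin(p2)) * RF
#                         (dy and dz follow the same pattern with cos terms)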
313 |
314 | image = Image.open("DR_DC_APP/Images/desur.png")
315 | st.image(image, caption="Figure 1. Diagrams of the two desurveying methods to choose from.")
316 |
317 | des_meth = st.radio("Select Desurveying Method (Minimum Curvature Recommended)", ("Minimum Curvature", "Tangential"))
318 |
319 | if des_meth == "Minimum Curvature":
320 | des_type = "min_curve"
321 | else:
322 | des_type = "tang"
323 |
324 | if "des_button" not in st.session_state:
325 | st.session_state["des_button"] = []
326 |
327 | if st.session_state["des_button"] != "clicked":
328 | # _, col2, _ = st.columns([3.6, 3, 2])
329 | if not st.button("DESURVEY DATA"):
330 | st.stop()
331 | else:
332 | st.session_state["des_button"] = "clicked"
333 |
334 | if st.session_state["des_button"] == "clicked":
335 | # _, col2, _ = st.columns([3.6, 3, 2])
336 | if st.button("Reset Desurvey"):
337 | del st.session_state["des_button"]
338 | st.session_state.clear()
339 | st.rerun()
340 |
341 | with st.spinner("Desurveying Data"):
342 | if des_int == "Existing Interval":
343 |
344 | check_streamlit_holes(adf, dfi)
345 |
346 | @st.cache(show_spinner=False, max_entries=1)
347 | def st_desurvey_df_from_df_custom(adf):
348 | # block raising an exception
349 | desurveyed_all = desurvey_df_from_df_custom(
350 | adf,
351 | dfi,
352 | desurvey_type=des_type, merge_data=True)
353 | return desurveyed_all
354 |
355 | desurveyed_all = st_desurvey_df_from_df_custom(adf)
356 |
357 | # try:
358 |
359 | # @st.cache(show_spinner=False, max_entries=1)
360 | # def st_desurvey_df_from_df_custom(adf):
361 | # # block raising an exception
362 | # desurveyed_all = desurvey_df_from_df_custom(adf, dfi, desurvey_type=des_type, merge_data=True)
363 | # return desurveyed_all
364 |
365 | # desurveyed_all = st_desurvey_df_from_df_custom(adf)
366 | # except:
367 | # st.markdown("#### ERROR: Unable to Desurvey - Check input data")
368 | # st.stop()
369 |
370 | st.dataframe(desurveyed_all)
371 |
372 | elif des_int == "Standard Interval":
373 |
374 | try:
375 |
376 | @st.cache(show_spinner=False, max_entries=1)
377 | def st_desurvey_df_from_interval(adf):
378 | # block raising an exception
379 | desurveyed_all = adf.groupby("HOLEID").apply(desurvey_hole_to_interval, interval=interval, desurvey_type=des_type).reset_index(drop=False).drop("level_1", axis=1)
380 | return desurveyed_all
381 |
382 | desurveyed_all = st_desurvey_df_from_interval(adf)
383 | except:
384 | st.markdown("#### ERROR: Unable to Desurvey - Check input data")
385 | st.stop()
386 |
387 | st.dataframe(desurveyed_all)
388 |
389 |
390 | ########################################
391 | # Download
392 |
393 | title_alignment = """
394 |
399 | """
400 |
401 | text = " Download Desurveyed Dataframe"
402 | st.markdown(f"###
{text}
", unsafe_allow_html=True)
403 |
404 | # st.markdown("### Downloading Composited Dataframe")
405 |
406 |
407 | @st.cache(show_spinner=False, max_entries=1)
408 | def convert_df(df):
409 | # IMPORTANT: Cache the conversion to prevent computation on every rerun
410 | return df.to_csv(index=False).encode("utf-8")
411 |
412 |
413 | # _, col2, _ = st.columns([3.2, 3, 2])
414 | csv = convert_df(desurveyed_all)
415 | st.download_button(
416 | label="Download Desurveyed Data",
417 | data=csv,
418 | file_name="desurveyed_data.csv",
419 | mime="text/csv",
420 | )
421 |
422 | ########################################
423 |
424 | st.markdown("# Plotting Check")
425 |
426 | container = st.container()
427 | select_all = st.checkbox("Select all")
428 | 
429 | if select_all:
430 | selected_options = container.multiselect(
431 | "Select Drillholes to Plot:",
432 | np.unique(desurveyed_all["HOLEID"]),
433 | np.unique(desurveyed_all["HOLEID"]),
434 | )
435 | else:
436 | selected_options = container.multiselect("Select Drillholes to Plot:", np.unique(desurveyed_all["HOLEID"]))
437 |
438 | df = desurveyed_all[desurveyed_all["HOLEID"].isin(selected_options)]
439 |
440 | collar_labels = st.checkbox("Collar Labels", True)
441 |
442 | fig = go.Figure()
443 | # Loop through individual holes and add trace
444 | for idx, holeid in enumerate(df["HOLEID"].unique()):
445 |
446 | # Get unique hole
447 | dfhole = df[df["HOLEID"] == holeid]
448 |
449 | # Add trace with mapped lithology as colour
450 | fig.add_trace(
451 | go.Scatter3d(
452 | x=dfhole["EAST_md"],
453 | y=dfhole["NORTH_md"],
454 | z=dfhole["RL_md"],
455 | mode="lines",
456 | name=holeid,
457 |             hoverinfo="text",
458 | line=dict(width=20),
459 | )
460 | )
461 |
462 | if collar_labels:
463 | df0 = df[df["FROM"] == 0].reset_index(drop=True)
464 | ann = [dict(x=x, y=y, z=z, text=holeid, showarrow=False) for x, y, z, holeid in zip(df0["EAST_md"], df0["NORTH_md"], df0["RL_md"] + 10, df0["HOLEID"])]
465 | fig.update_layout(scene=dict(annotations=ann))
466 |
467 | fig.update_layout(width=1200, height=1200, showlegend=False, legend_title_text="Cluster_ID")
468 | fig.update_layout(scene=dict(xaxis_title="Easting", yaxis_title="Northing", zaxis_title="RL"))
469 | fig.update_layout(scene_aspectmode="data", scene_aspectratio=dict(x=1, y=1, z=1))
470 |
471 |
472 | st.plotly_chart(fig, use_container_width=True, theme=None)
473 |
474 |
475 | # def convert_df(df):
476 | # return df.to_csv().encode('utf-8')
477 | # csv = convert_df(df)
478 | # st.download_button(
479 | # "Press to Download",
480 | # csv,
481 | # "Desurveyed_Holes.csv",
482 | # "text/csv",
483 | # key='download-csv'
484 | # )
485 |
486 | ########################################
487 |
488 | # with st.form("Submission", clear_on_submit=False):
489 | # st.markdown("Click the button below when you are happy with the desurveying results.")
490 | # if st.form_submit_button("Submit Desurveyed Data"):
491 | # st.session_state["sesh_desurveyed_df"] = desurveyed_all
492 | # st.write("Submitted!")
493 |
--------------------------------------------------------------------------------
/desurveying_utils.py:
--------------------------------------------------------------------------------
1 | # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/Desurveying/01_desurveying_utils.ipynb.
2 |
3 | # %% auto 0
4 | __all__ = ['add_90_surveys', 'convert_columns_to_numerical', 'combine_collar_survey', 'add_zero_and_max_depths',
5 | 'tangent_desurvey', 'min_curve_desurvey', 'plot_desurvey_comparison', 'linear_interpolation_desurvey',
6 | 'spherical_interpolation_desurvey', 'desurvey_to_interval', 'apply_geographical_transform',
7 | 'desurvey_hole_to_interval', 'check_depths_custom', 'desurvey_hole_custom',
8 | 'merge_desurveyed_and_data_custom', 'desurvey_df_from_df_custom']
9 |
10 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 2
11 | import warnings
12 | warnings.filterwarnings("ignore")
13 |
14 | import pandas as pd
15 | import numpy as np
16 | import math
17 | import itertools
18 | import plotly.graph_objects as go
19 | from plotly.subplots import make_subplots
20 | import plotly.express as px
21 | from scipy import interpolate
22 | from scipy.interpolate import interpn
23 | from decimal import Decimal
24 |
25 | from drillhole_utils import *
26 | from decimal import *
27 | getcontext().prec = 1000
28 |
29 |
30 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 3
31 | def add_90_surveys(
32 | cdf:pd.DataFrame, # Collar data
33 | sdf:pd.DataFrame, # Survey data
34 | )->pd.DataFrame:
35 |     "Adds vertical (-90 dip) survey rows for collar holes that have no survey records"
36 |
37 | chole_list = cdf["HOLEID"].unique()
38 | shole_list = sdf["HOLEID"].unique()
39 |
40 | cgot = [h for h in chole_list if h not in shole_list]
41 | #sgot = [h for h in shole_list if h not in chole_list]
42 |
43 | scdf = cdf[cdf['HOLEID'].isin(cgot)]
44 | scdf = scdf[scdf['RL'].notna() & scdf['TD'].notna()]
45 | scdf['DIP'] = -90
46 | scdf['AZIM'] = 0
47 | scdf['DEPTH'] = scdf['TD']
48 |
49 | sdf = pd.concat([sdf, scdf[['HOLEID', 'DEPTH', 'AZIM', 'DIP']]])
50 | return sdf
51 |
52 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 4
53 | def convert_columns_to_numerical(df: pd.DataFrame) -> pd.DataFrame:
54 | """
55 | Converts columns in a pandas DataFrame to numerical data types if possible,
56 | and leaves them as object types otherwise.
57 | """
58 | converted_df = df.copy()
59 |
60 | for column in converted_df.columns:
61 | try:
62 | converted_df[column] =converted_df[column].astype(float)
63 | except ValueError:
64 | pass # If conversion fails, leave the column as object type
65 |
66 | return converted_df
67 |
68 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 5
69 | def combine_collar_survey(cdf: pd.DataFrame, # Pandas dataframe containing the collar information
70 | sdf: pd.DataFrame, # Pandas dataframe containing the survey information
71 | cHOLEID: str, # Collar HoleID column name
72 | EAST: str, # Collar Easting/Longitude column name
73 | NORTH: str, # Collar Northing/Latitude column name
74 | RL: str, # Collar Relative Sea Level/Z column name
75 | TD: str, # Collar total depth column name
76 | sHOLEID: str, # Survey HoleID column name
77 | AZIM: str, # Survey Azimuth column name
78 | DIP: str, # Survey Dip column name
79 | DEPTH: str, #Survey Depth column name
80 |                           add_90: bool=True, # Add a vertical (-90 degree) survey for holes missing survey data
81 | )->pd.DataFrame:
82 | """Combines the collar and survey information into a single dataframe"""
83 |
84 | # Subset and rename
85 | cdf = cdf[[cHOLEID, EAST, NORTH, RL, TD]]
86 | sdf = sdf[[sHOLEID, DEPTH, AZIM, DIP]]
87 |
88 | #Rename columns
89 | sdf = sdf.rename(columns={sHOLEID: "HOLEID", DEPTH: "DEPTH", AZIM: "AZIM", DIP: "DIP"})
90 | cdf = cdf.rename(columns={cHOLEID: "HOLEID", EAST: "EAST", NORTH: "NORTH", RL: "RL", TD: "TD"})
91 |
92 | if add_90:
93 | sdf = add_90_surveys(cdf, sdf)
94 |
95 | # Merge and unwrap azi
96 | adf = sdf.merge(cdf, on="HOLEID")
97 | adf["AZIM"] = np.unwrap(adf["AZIM"], period=360)
98 |
99 | return adf
100 |
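# A usage sketch against the bundled Galore Creek example data (the survey column
# names come from GCMC_2019_Survey.csv; the collar column names are assumptions):
#
#     cdf = pd.read_csv("Example_Data/Galore_Creek/GCMC_2019_Collar.csv")
#     sdf = pd.read_csv("Example_Data/Galore_Creek/GCMC_2019_Survey.csv")
#     adf = combine_collar_survey(cdf, sdf,
#                                 cHOLEID="HOLEID", EAST="EAST", NORTH="NORTH", RL="RL", TD="TD",
#                                 sHOLEID="HOLEID", AZIM="AZIMUTH", DIP="DIP", DEPTH="DEPTH")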
101 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 13
102 | def add_zero_and_max_depths(adf: pd.DataFrame) -> pd.DataFrame:
103 | """
104 |     Adds a zero-depth survey point and a max-depth (TD) survey point to a combined collar and survey dataframe
105 | """
106 |
107 | # Remove duplicate survey points
108 | adf = adf.drop_duplicates('DEPTH')
109 |
110 | # Add zero data if necessary
111 | if adf["DEPTH"].min() != 0:
112 | newrow = adf.iloc[0].copy()
113 | newrow["DEPTH"] = 0
114 | adf = pd.concat([newrow.to_frame().T, adf], ignore_index=True)
115 |
116 | # Add last row if depth is less than TD
117 | if adf["DEPTH"].max() < adf["TD"].max():
118 | newrow = adf.iloc[-1].copy()
119 | newrow["DEPTH"] = newrow["TD"]
120 | adf = pd.concat([adf, newrow.to_frame().T], ignore_index=True)
121 |
122 | # Update TD if necessary
123 | if adf["TD"].max() < adf["DEPTH"].max():
124 | adf.loc[adf["DEPTH"].idxmax(), "TD"] = adf["DEPTH"].max()
125 |
126 | # Reset index and sort
127 | adf = adf.sort_values(["HOLEID", "DEPTH"]).reset_index(drop=True)
128 |
129 | adf = convert_columns_to_numerical(adf)
130 |
131 | return adf
132 |
133 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 17
134 | def tangent_desurvey(
135 | depth: pd.Series, # The depth column of a dataframe to desurvey
136 | phi: pd.Series, # The azimuth column of a dataframe to desurvey
137 | theta: pd.Series, # The dip column of a dataframe to desurvey
138 | matrix: bool=False, # if you only desire each individual point (default false)
139 | )-> np.array:
140 |     """Desurveys hole to xyz based on dip and azimuth using the tangent method (copied from website, link lost)"""
141 |
142 | # get lengths of the separate segments
143 | lengths = np.array(depth)
144 | # np.subtract(depth[1:], depth[:-1])
145 | lengths[1:] -= depth[:-1]
146 | # convert to radians
147 | phi = np.deg2rad(phi)
148 |
149 |     # dips pointing downhole are often recorded as negative; flip the sign so theta is positive
150 | if np.sign(theta).mean() == -1:
151 | theta = theta * -1
152 |
153 | # in spherical coordinates theta is measured from the zenith down,
154 | # whereas dip is measured up from the horizontal plane
155 | theta = np.deg2rad(90.0 - theta)
156 |
157 | # get x, y, z from known formulae
158 | #x = lengths * np.sin(phi) * np.sin(theta)
159 | x = np.array([Decimal(x) for x in lengths]) * np.array([Decimal(x) for x in np.sin(phi)]) * np.array([Decimal(x) for x in np.sin(theta)])
160 | #y = lengths * np.cos(phi) * np.sin(theta)
161 | y = np.array([Decimal(x) for x in lengths]) * np.array([Decimal(x) for x in np.cos(phi)]) * np.array([Decimal(x) for x in np.sin(theta)])
162 | #z = lengths * np.cos(theta)
163 | z = np.array([Decimal(x) for x in lengths]) * np.array([Decimal(x) for x in np.cos(theta)])
164 |
165 | if matrix:
166 | return np.column_stack((depth, x, y, z))
167 | else:
168 | # np.cumsum is employed to gradually sum resultant vectors
169 | return np.column_stack((depth, np.cumsum(x), np.cumsum(y), abs(np.cumsum(z))))
170 |
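# A minimal synthetic check of tangent_desurvey: three stations on a hole
# that kicks off towards the north-east. The values are made up; the returned
# columns are depth, cumulative x, cumulative y, and (positive-down) z.
#
#   import pandas as pd
#   depth = pd.Series([0.0, 50.0, 100.0])
#   azim = pd.Series([45.0, 45.0, 45.0])
#   dip = pd.Series([-90.0, -85.0, -80.0])
#   xyz = tangent_desurvey(depth, azim, dip)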
171 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 19
172 | def min_curve_desurvey(
173 | depth: pd.Series, # The depth column of a dataframe to desurvey
174 | phi: pd.Series, # The azimuth column of a dataframe to desurvey
175 | theta: pd.Series, # The dip column of a dataframe to desurvey
176 | matrix: bool=False, # if you only desire each individual point (default false)
177 | )-> np.ndarray:
178 | """Desurveys hole to xyz based on dip and azimuth using the minimum curvature method (copied from website, link lost)"""
179 |
180 | # get lengths of the separate segments
181 | lengths = np.array(depth)
182 | # np.subtract(depth[1:], depth[:-1])
183 | lengths[1:] -= depth[:-1]
184 |
185 | # convert to radians
186 | phi = np.deg2rad(phi)
187 |
188 | # downhole dips are often recorded as negative; normalise to positive
189 | if np.sign(theta).mean() == -1:
190 | theta = theta * -1
191 |
192 | # in spherical coordinates theta is measured from the zenith down,
193 | # whereas dip is measured up from the horizontal plane
194 | theta = np.deg2rad(90.0 - theta)
195 |
196 | theta1, theta2 = theta, theta.shift(-1).ffill()
197 | phi1, phi2 = phi, phi.shift(-1).ffill()
198 |
199 | dl = np.arccos(np.cos(theta2 - theta1) - np.sin(theta1) * np.sin(theta2) * (1 - np.cos(phi2 - phi1)))
200 |
201 | rfs = []
202 | for dogleg in dl:
203 | if dogleg != 0.0:
204 | rf = 2 * Decimal(np.tan(dogleg / 2)) / Decimal(dogleg) # minimum curvature
205 | rfs.append(rf)
206 | else:
207 | rfs.append(1) # balanced tangential
208 |
209 | #x = 0.5 * lengths * (np.sin(theta1) * np.sin(phi1) + np.sin(theta2) * np.sin(phi2)) * rfs
210 | #y = 0.5 * lengths * (np.sin(theta1) * np.cos(phi1) + np.sin(theta2) * np.cos(phi2)) * rfs
211 | #z = 0.5 * lengths * (np.cos(theta1) + np.cos(theta2)) * rfs
212 |
213 | x = Decimal(0.5) * np.array([Decimal(x) for x in lengths]) * (np.array([Decimal(x) for x in np.sin(theta1)]) * np.array([Decimal(x) for x in np.sin(phi1)]) + np.array([Decimal(x) for x in np.sin(theta2)]) * np.array([Decimal(x) for x in np.sin(phi2)])) * rfs
214 | y = Decimal(0.5) * np.array([Decimal(x) for x in lengths]) * (np.array([Decimal(x) for x in np.sin(theta1)]) * np.array([Decimal(x) for x in np.cos(phi1)]) + np.array([Decimal(x) for x in np.sin(theta2)]) * np.array([Decimal(x) for x in np.cos(phi2)])) * rfs
215 | z = Decimal(0.5) * np.array([Decimal(x) for x in lengths]) * (np.array([Decimal(x) for x in np.cos(theta1)]) + np.array([Decimal(x) for x in np.cos(theta2)])) * rfs
216 |
217 | if matrix:
218 | return np.column_stack((depth, x, y, z))
219 | else:
220 | # np.cumsum is employed to gradually sum resultant vectors
221 | return np.column_stack((depth, np.cumsum(x), np.cumsum(y), abs(np.cumsum(z))))
222 |
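# For reference, the ratio factor used above is RF = (2 / dl) * tan(dl / 2),
# which smoothly approaches 1 as the dogleg angle dl -> 0 (hence the balanced
# tangential fallback of RF = 1 for a zero dogleg). A quick numerical check:
#
#   import numpy as np
#   for dl in [1e-6, 0.1, 0.5]:  # dogleg angles in radians
#       print(dl, 2 * np.tan(dl / 2) / dl)  # ~1.0, ~1.000834, ~1.021368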
223 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 21
224 | def plot_desurvey_comparison(
225 | adf: pd.DataFrame, # Dataframe containing a single hole to desurvey
226 | )-> None:
227 | "Plots an interactive 3D comparison of the tangent and minimum curvature desurveys"
228 |
229 | tandf = pd.DataFrame(
230 | tangent_desurvey(adf["DEPTH"], adf["AZIM"], adf["DIP"], matrix=False),
231 | columns=["DEPTH", "X", "Y", "Z"],
232 | )
233 | mindf = pd.DataFrame(
234 | min_curve_desurvey(adf["DEPTH"], adf["AZIM"], adf["DIP"], matrix=False),
235 | columns=["DEPTH", "X", "Y", "Z"],
236 | )
237 |
238 | fig = go.Figure()
239 | fig.add_trace(
240 | go.Scatter3d(
241 | x=tandf["X"],
242 | y=tandf["Y"],
243 | z=tandf["Z"],
244 | mode="lines+markers",
245 | name="Tangent",
246 | line=dict(color="red", width=15),
247 | )
248 | )
249 | fig.add_trace(
250 | go.Scatter3d(
251 | x=mindf["X"],
252 | y=mindf["Y"],
253 | z=mindf["Z"],
254 | mode="lines+markers",
255 | name="Min Curve",
256 | line=dict(color="blue", width=15),
257 | )
258 | )
259 | fig.update_layout(width=1200, height=1200, legend_title_text="Method")
260 | fig.update_layout(scene_aspectmode="data", scene_aspectratio=dict(x=1, y=1, z=1))
261 | fig.update_scenes(zaxis_autorange="reversed")
262 |
263 | fig.show()
264 |
265 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 23
266 | def linear_interpolation_desurvey(
267 | df: pd.DataFrame, # dataframe containing the X Y Z desurveyed coordinates
268 | DEPTH: str, # Column name containing the survey depth values
269 | froto: pd.DataFrame, # dataframe containing from and to values
270 | FROM: str, # Column name containing from depths in the interval
271 | TO: str, # Column name containing to values
272 | )->pd.DataFrame:
273 | "Linearly interpolates data downhole"
274 |
275 | df0 = df.copy()
276 | froto_out = froto.copy()
277 |
278 | # Get xyz values and depths
279 | #tdf = df0[df0['X'].notna()].drop_duplicates('DEPTH', keep='last')
280 |
281 | x, y, z = df0[['X', 'Y', 'Z']].T.values
282 | w = df0[DEPTH].values
283 |
284 | #Set interpolation function and interpolate hole
285 | f = interpolate.interp1d(w, (x, y, z), fill_value="extrapolate")
286 |
287 | #x_int, y_int, z_int = f(froto_out[FROM].values)
288 | x_int, y_int, z_int = [Decimal(x_) for x_ in f(froto_out[FROM])[0, :]], [Decimal(y_) for y_ in f(froto_out[FROM])[1, :]], [Decimal(z_) for z_ in f(froto_out[FROM])[2, :]]
289 |
290 | #Put data in dataframe and replace original
291 | interpo_df = pd.DataFrame([x_int, y_int, z_int], index=['X', 'Y', 'Z']).T
292 | froto_out[['X', 'Y', 'Z']] = interpo_df
293 |
294 | return froto_out[[FROM, TO, 'X', 'Y', 'Z']]
295 |
296 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 26
297 | def spherical_interpolation_desurvey(
298 | df: pd.DataFrame, # dataframe containing the X Y Z desurveyed coordinates
299 | DEPTH: str, # Column name containing the survey depth values
300 | froto: pd.DataFrame, # dataframe containing from and to values
301 | FROM: str, # Column name containing from values
302 | TO: str, # Column name containing to values
303 | )->pd.DataFrame:
304 | "Spherically interpolates interval data downhole"
305 |
306 | df0 = df.copy()
307 | froto_out = froto.copy()
308 |
309 | # If there are 3 or fewer survey points, fall back to linear interpolation
310 | if len(df0)<=3:
311 | froto_out = linear_interpolation_desurvey(df0, DEPTH, froto_out, FROM, TO)
312 | return froto_out[[FROM, TO, 'X', 'Y', 'Z']]
313 |
314 | # Get x, y, z values plus depth values
315 | x, y, z = df0[['X', 'Y', 'Z']].T.values
316 | w = df0[DEPTH].values
317 |
318 | x = np.array([np.float64(x_) for x_ in x], dtype=np.float64)
319 | y = np.array([np.float64(y_) for y_ in y], dtype=np.float64)
320 | z = np.array([np.float64(z_) for z_ in z], dtype=np.float64)
321 | w = np.array(w, dtype=np.float64)
322 |
323 | # Fit a parametric spline through the xyz points, parameterised by depth
324 | tck, u = interpolate.splprep([x,y,z], s=1, u=w)
325 |
326 | #Get values to interpolate and interpolate
327 | u_int = froto_out[FROM].values
328 | x_int, y_int, z_int = interpolate.splev(u_int, tck)
329 |
330 | #Get interpolated values and replace original
331 | interpo_df = pd.DataFrame([x_int, y_int, z_int], index=['X', 'Y', 'Z']).T
332 | froto_out[['X', 'Y', 'Z']] = interpo_df
333 |
334 | return froto_out[[FROM, TO, 'X', 'Y', 'Z']]
335 |
336 |
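# For clarity, the spline fit above in isolation: splprep fits a parametric
# cubic B-spline through the (x, y, z) stations, parameterised by depth
# (u=w), and splev evaluates it at arbitrary depths. Synthetic data only:
#
#   import numpy as np
#   from scipy import interpolate
#   w = np.array([0.0, 25.0, 50.0, 75.0, 100.0])  # station depths
#   x, y, z = np.sin(w / 40.0), np.cos(w / 40.0), w  # a gently curving trace
#   tck, u = interpolate.splprep([x, y, z], s=1, u=w)
#   x_i, y_i, z_i = interpolate.splev([10.0, 60.0], tck)  # new depths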
337 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 28
338 | def desurvey_to_interval(
339 | adf: pd.DataFrame, # pandas dataframe containing survey data
340 | DEPTH: str, # Column name in adf that contains the survey depth
341 | froto: pd.DataFrame, # pandas dataframe containing from-to values
342 | FROM: str, # Column name in froto containing from values
343 | TO: str, # Column name in froto containing to values
344 | interpolation_method: str="spherical", # Interpolation method between data points ('spherical' or 'linear')
345 | )->pd.DataFrame:
346 | "Interpolates desurveyed coordinates onto from-to intervals using spherical (default) or linear interpolation"
347 |
348 | # Reset index
349 | adf = adf.reset_index(drop=True)
350 |
351 | if interpolation_method=='linear':
352 | tsdfint = linear_interpolation_desurvey(adf, DEPTH, froto, FROM, TO)
353 | elif interpolation_method=='spherical':
354 | tsdfint = spherical_interpolation_desurvey(adf, DEPTH, froto, FROM, TO)
355 | else:
356 | print("Must select either 'linear' or 'spherical' interpolation method")
357 | return
358 |
359 |
360 | return tsdfint
361 |
362 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 31
363 | def apply_geographical_transform(
364 | adf: pd.DataFrame, # pandas dataframe containing the survey and collar data
365 | df: pd.DataFrame, # pandas dataframe containing the xyz data of the desurvey
366 | )-> pd.DataFrame:
367 | "Converts XYZ data of a desurveyed dataframe to geographical locations with from, mid and to locations"
368 | # Apply geographical corrections to location
369 | df[["EAST_fr", "NORTH_fr", "RL_fr"]] = pd.DataFrame([(Decimal(row["X"]) + Decimal(adf["EAST"].iloc[0]), Decimal(row["Y"]) + Decimal(adf["NORTH"].iloc[0]), (-1 * Decimal(row["Z"])) + Decimal(adf["RL"].iloc[0])) for idx, row in df.iterrows()], columns=["EAST_fr", "NORTH_fr", "RL_fr"],)
370 | df[["EAST_to", "NORTH_to", "RL_to"]] = df[["EAST_fr", "NORTH_fr", "RL_fr"]].shift(-1).fillna(np.nan)
371 | #df[["EAST_md", "NORTH_md", "RL_md"]] = pd.DataFrame([df[[col for col in df.columns if dimension in col]].mean(axis=1) for dimension in ["EAST", "NORTH", "RL"]]).T
372 | #df['EAST_md'] = [(Decimal(df['EAST_fr'].loc[idx])+Decimal(df['EAST_to'].loc[idx])/2) for idx, row in df.iterrows()]
373 |
374 | # Use Decimal arithmetic for the midpoints to avoid float rounding issues
375 | mean_decimal = lambda row: sum(map(Decimal, row))/len(row)
376 | df['EAST_md'] = df[['EAST_fr', 'EAST_to']].apply(mean_decimal, axis=1)
377 | df['NORTH_md'] = df[['NORTH_fr', 'NORTH_to']].apply(mean_decimal, axis=1)
378 | df['RL_md'] = df[['RL_fr', 'RL_to']].apply(mean_decimal, axis=1)
379 |
380 | df = df[df["FROM"] < adf["TD"].max()]
381 |
382 | if 'X' in df.columns:
383 | df = df.drop(['X', 'Y', 'Z'], axis=1)
384 |
385 | return df
386 |
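# A tiny illustration of the Decimal midpoint trick above, on made-up values:
#
#   from decimal import Decimal
#   mean_decimal = lambda row: sum(map(Decimal, row)) / len(row)
#   mean_decimal(["1000.1", "1000.3"])  # Decimal('1000.2'), no float rounding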
387 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 34
388 | def desurvey_hole_to_interval(
389 | adf: pd.DataFrame, # pandas Dataframe containing the dip and azim data
390 | interval: float, # Interval size to desurvey data to
391 | desurvey_type: str="min_curve", # desurvey type ('min_curve' for minimum curvature, anything else for tangent)
392 | )->pd.DataFrame:
393 | "Desurveys the hole and applies geographical locations to the data"
394 |
395 | #print(adf['HOLEID'].unique()[0])
396 |
397 | # Prepare survey data
398 | adf = add_zero_and_max_depths(adf).reset_index(drop=True)
399 | froto = generate_fromto(adf, 'TD', interval)
400 |
401 | # Desurvey the data
402 | if desurvey_type == "min_curve":
403 | df_des = pd.DataFrame(min_curve_desurvey(adf["DEPTH"], adf["AZIM"], adf["DIP"], matrix=False), columns=["DEPTH", "X", "Y", "Z"],)
404 | df_des = desurvey_to_interval(df_des, 'DEPTH', froto, 'FROM', 'TO', interpolation_method='spherical')
405 |
406 | else:
407 | df_des = pd.DataFrame(tangent_desurvey(adf["DEPTH"], adf["AZIM"], adf["DIP"], matrix=False), columns=["DEPTH", "X", "Y", "Z"])
408 | df_des = desurvey_to_interval(df_des, 'DEPTH', froto, 'FROM', 'TO', interpolation_method='linear')
409 |
410 | # Apply geographical corrections to location
411 | df_des = apply_geographical_transform(adf, df_des)
412 |
413 | return df_des
414 |
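# A hedged usage sketch: desurvey every hole in a combined collar/survey
# dataframe (adf, as returned by combine_collar_survey) to 1 m intervals.
#
#   des_dfs = []
#   for hole in adf["HOLEID"].unique():
#       hole_df = adf[adf["HOLEID"] == hole].reset_index(drop=True)
#       des_dfs.append(desurvey_hole_to_interval(hole_df, interval=1.0))
#   des_df = pd.concat(des_dfs, ignore_index=True)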
415 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 37
416 | def check_depths_custom(adf: pd.DataFrame, dfi: pd.DataFrame) -> tuple:
417 | """
418 | Checks the custom interval depths in dfi against the survey depths in adf,
419 | extending either dataframe where necessary so the depth ranges match.
420 | """
421 | dfi = round_from_to(dfi, 'FROM', 'TO')
422 |
423 | # Remove rows where 'FROM' is greater than 'TO'
424 | dfi = dfi[dfi['TO'] >= dfi['FROM']].reset_index(drop=True)
425 |
426 | # Updating to include top of hole interval
427 | if dfi['FROM'].min() != 0:
428 | temp_from = 0
429 | temp_to = dfi['FROM'].min()
430 | temp_df = pd.DataFrame([[temp_from, temp_to]], columns=['FROM', 'TO'])
431 | dfi = pd.concat([dfi, temp_df], ignore_index=True)
432 |
433 | # Reset
434 | dfi = dfi.sort_values(["FROM"]).reset_index(drop=True)
435 |
436 | # Add new row for deepest row
437 | if adf['DEPTH'].max() > dfi['TO'].max():
438 | # Adding a tail to the end of the hole
439 | newrow = pd.DataFrame([[np.nan]*len(dfi.columns)], columns=dfi.columns)
440 | newrow["FROM"] = dfi["TO"].max()
441 | newrow["TO"] = adf["DEPTH"].max()
442 | newrow['HOLEID'] = adf['HOLEID'].iloc[0]
443 | dfi = pd.concat([dfi, newrow], ignore_index=True)
444 |
445 | # Reset
446 | dfi = dfi.sort_values(["FROM"]).reset_index(drop=True)
447 |
448 | if dfi['TO'].max() >= adf['DEPTH'].max():
449 | # Updating 'DEPTH' and 'TD' in the last row of adf
450 | adf.loc[len(adf)-1, "DEPTH"] = dfi["TO"].max()
451 | adf["TD"] = dfi["TO"].max()
452 |
453 | if adf['DEPTH'].max() == dfi['TO'].max():
454 | # Adding a tail to the end of the hole
455 | newrow = dfi.iloc[[-1]].copy()
456 | newrow["FROM"] = dfi["TO"].max()
457 | newrow["TO"] = dfi["TO"].max() + 50
458 | dfi = pd.concat([dfi, newrow], ignore_index=True)
459 |
460 | dfi = dfi.sort_values(["FROM"]).reset_index(drop=True)
461 |
462 | adf = convert_columns_to_numerical(adf)
463 | dfi = convert_columns_to_numerical(dfi)
464 |
465 | return adf, dfi
466 |
467 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 43
468 | def desurvey_hole_custom(
469 | adf: pd.DataFrame, # pandas Dataframe containing the dip and azim data
470 | dfi: pd.DataFrame, # Dataframe containing the from and to intervals to desurvey to
471 | desurvey_type: str="min_curve", # desurvey type ('min_curve' for minimum curvature, anything else for tangent)
472 | )->pd.DataFrame:
473 | "Desurveys the hole and applies geographical locations to the data based on intervals of another dataframe"
474 |
475 | # Prepare survey data
476 | adf = add_zero_and_max_depths(adf)
477 |
478 | #Unwrap azimuth
479 | adf["AZIM"] = np.unwrap(adf["AZIM"], period=360)
480 |
481 | #Check interval depths
482 | dfi = round_from_to(dfi, 'FROM', 'TO')
483 | adf, dfi = check_depths_custom(adf, dfi)
484 |
485 | # Desurvey the data
486 | if desurvey_type == "min_curve":
487 | df_des = pd.DataFrame(min_curve_desurvey(adf["DEPTH"], adf["AZIM"], adf["DIP"], matrix=False), columns=["DEPTH", "X", "Y", "Z"],)
488 | df_des = desurvey_to_interval(df_des, 'DEPTH', dfi, 'FROM', 'TO', interpolation_method='spherical')
489 |
490 | else:
491 | df_des = pd.DataFrame(tangent_desurvey(adf["DEPTH"], adf["AZIM"], adf["DIP"], matrix=False), columns=["DEPTH", "X", "Y", "Z"])
492 | df_des = desurvey_to_interval(df_des, 'DEPTH', dfi, 'FROM', 'TO', interpolation_method='linear')
493 |
494 | # Apply geographical corrections to location
495 | df_des = apply_geographical_transform(adf, df_des)
496 | df_des['HOLEID'] = adf['HOLEID'].iloc[0]
497 |
498 | df_des = df_des[['HOLEID']+[col for col in df_des.columns if 'HOLEID' not in col]]
499 |
500 | return df_des
501 |
502 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 45
503 | def merge_desurveyed_and_data_custom(
504 | df_des: pd.DataFrame, # Desurveyed dataframe
505 | dfi: pd.DataFrame, # Dataframe with interval data
506 | )-> pd.DataFrame:
507 | "Merges the desurveyed data with the existing interval data"
508 | dfout = pd.merge(df_des, dfi, how='left', on=['HOLEID', 'FROM', 'TO'])
509 | if 'Interval' in dfout.columns:
510 | dfout = dfout.drop('Interval', axis=1)
511 | return dfout
512 |
513 | # %% ../nbs/Desurveying/01_desurveying_utils.ipynb 47
514 | def desurvey_df_from_df_custom(
515 | adf: pd.DataFrame, # Dataframe containing the collar and survey data
516 | dfi: pd.DataFrame, # Dataframe containing the from and to intervals to join the data to
517 | desurvey_type: str, # Desurvey type: 'min_curve' for minimum curvature or anything else for tangential desurvey
518 | merge_data: bool=True, # Option whether to merge the data back with the desurveyed data
519 | )->pd.DataFrame:
520 | "Desurveys a dataframe of drillhole data to the from-to intervals of an existing dataframe with the same drillhole IDs"
521 |
522 | #Loop through each unique hole
523 | df_dess = []
524 | for hole in adf['HOLEID'].unique():
525 | #print(hole)
526 |
527 | #See if unique hole exists in both dataframes
528 | if hole not in dfi['HOLEID'].unique():
529 | print(hole + " does not have any intervals to join to.")
530 | continue
531 |
532 | # Set both new dataframes
533 | adf_temp = adf[adf['HOLEID'] == hole].reset_index(drop=True)
534 | dfi_temp = dfi[dfi['HOLEID'] == hole].reset_index(drop=True)
535 |
536 | df_des = desurvey_hole_custom(adf_temp, dfi_temp, desurvey_type=desurvey_type)
537 |
538 | df_dess.append(df_des)
539 |
540 | #Concatenate to form final df
541 | all_des_df = pd.concat(df_dess).reset_index(drop=True)
542 |
543 | if merge_data:
544 | all_des_df = merge_desurveyed_and_data_custom(all_des_df, dfi)
545 |
546 | return all_des_df
547 |
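# A hedged usage sketch: desurvey assay intervals onto the drill traces. Here
# adf is the combined collar/survey dataframe and dfi is assumed to carry the
# internal HOLEID/FROM/TO column names plus the assay columns to merge back.
#
#   des_assays = desurvey_df_from_df_custom(adf, dfi, desurvey_type="min_curve", merge_data=True)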
--------------------------------------------------------------------------------
/interval_compositing.py:
--------------------------------------------------------------------------------
1 | # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/Compositing/02_interval_compositing.ipynb.
2 |
3 | # %% auto 0
4 | __all__ = ['composite_drillhole_set', 'composite_dataframe_set', 'composite_dataframe_set_dask', 'composite_dataframe_set_mp',
5 | 'BIG_composite_dataframe_set_dask', 'composite_drillhole_custom', 'composite_dataframe_custom',
6 | 'composite_dataframe_custom_dask', 'composite_dataframe_custom_mp', 'BIG_composite_dataframe_custom_dask']
7 |
8 | # %% ../nbs/Compositing/02_interval_compositing.ipynb 4
9 | import warnings
10 | warnings.filterwarnings("ignore")
11 |
12 | import pandas as pd
13 | import numpy as np
14 | #import dask  # NOTE: the *_dask functions below need these imports re-enabled
15 | # import dask.dataframe as dd
16 | # from dask.distributed import Client, LocalCluster
17 | # import dask.bag as db  # referenced as `db` by the *_dask functions
18 | import multiprocessing
19 | import multiprocess as mp
20 |
21 | from drillhole_utils import *
22 | from compositing_utils import *
23 |
24 | # %% ../nbs/Compositing/02_interval_compositing.ipynb 5
25 | def composite_drillhole_set(
26 | df: pd.DataFrame, # Pandas dataframe containing drillhole data
27 | holeid: str, # Column name containing the hole ids
28 | fro: str, # Column name containing the 'from' depths
29 | to: str, # Column name containing the 'to' depths
30 | interval: float = 1, # Numerical value to create interval lengths (default = 1m)
31 | cat_stats: list=['max_overlap', 'max_overlap_weight', 'unique_class_count', 'enclosed_class_count', 'min_overlap'], # list of categorical statistics to calculate in the composite
32 | num_stats: list=['weighted_average', 'max', 'min', 'range'], # list of numerical statistics to calculate in the composite
33 | columns_to_comp: list = None, # column names to include in the compositing
34 | ignore_cat_nans: bool=True, # True to ignore missing categorical data
35 | )->pd.DataFrame:
36 | """This is a function that calculates the composited data of a given interval over an entire drillhole.
37 | It takes in a Pandas dataframe containing drillhole data, the column name containing the hole ids, the column
38 | name containing the 'from' depths, and the colunm name containing the 'to' depths as parameters. It also takes
39 | in an optional numerical value to create interval lengths, a list of categorical statistics to calculate in
40 | the composite, and a list of numerical statistics to calculate in the composite. The function then ensures
41 | there are no gaps in from-to values, fills categories, generates composited intervals, calculates weights of
42 | each interval against original intervals, and calculates composites before returning the composited dataframe."""
43 |
44 | # Ensure no gaps in from-to values
45 | df = add_missing_intervals(df, holeid, fro, to).reset_index(drop=True)
46 |
47 | # Fill categories
48 | df = fill_cats(df)
49 |
50 | # Generate the composited intervals
51 | df = create_composite_intervals(df, fro, to, interval=interval)
52 |
53 | # Calculate the weights of each interval against original intervals
54 | edf = calculate_interval_weights(df, fro, to, interval=interval)
55 |
56 | # Calculate the composites
57 | compdf = composite_data(edf, holeid, fro, to, cat_stats, num_stats, columns_to_comp, ignore_cat_nans).sort_values(fro)
58 |
59 | return compdf
60 |
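# A toy single-hole example of composite_drillhole_set: 2 m composites over
# irregular assay intervals. All values are made up for illustration.
#
#   import pandas as pd
#   hole = pd.DataFrame({
#       "HOLEID": ["DH001"] * 3,
#       "FROM": [0.0, 1.5, 2.5],
#       "TO": [1.5, 2.5, 4.0],
#       "Au_ppm": [0.2, 1.1, 0.6],
#       "Lith": ["BAS", "BAS", "AND"],
#   })
#   comp = composite_drillhole_set(hole, "HOLEID", "FROM", "TO", interval=2)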
61 | # %% ../nbs/Compositing/02_interval_compositing.ipynb 8
62 | def composite_dataframe_set(
63 | df: pd.DataFrame, # Pandas dataframe containing drillhole data
64 | holeid: str, # Column name containing the hole ids
65 | fro: str, # Column name containing the 'from' depths
66 | to: str, # Column name containing the 'to' depths
67 | interval: float = 1, # Numerical value to create interval lengths (default = 1m)
68 | cat_stats: list=['max_overlap', 'max_overlap_weight', 'unique_class_count', 'enclosed_class_count', 'min_overlap'], # list of categorical statistics to calculate in the composite
69 | num_stats: list=['weighted_average', 'max', 'min', 'range'], # list of numerical statistics to calculate in the composite
70 | columns_to_comp: list = None, # column names to include in the compositing
71 | ignore_cat_nans: bool=True, # True to ignore missing categorical data
72 | )->pd.DataFrame:
73 | """This function takes in a Pandas dataframe containing drillhole data and creates a composite dataframe
74 | by grouping the drillholes. It takes in the column names containing the hole ids, 'from' depths, and 'to'
75 | depths as parameters. It also takes in an interval length (default = 1m) and lists of categorical and
76 | numerical statistics to calculate for the composite. The columns to include in the compositing can also
77 | be specified, as well as whether to ignore missing categorical data or not (default = True). The function
78 | then applies the composite_drillhole_set function to each drillhole and returns a new composite dataframe."""
79 |
80 | # Groupby and composite drillholes
81 | composite_df = df.groupby(holeid).apply(composite_drillhole_set, holeid, fro, to, interval, cat_stats, num_stats, columns_to_comp, ignore_cat_nans).reset_index(drop=True)
82 |
83 | return composite_df
84 |
85 | # %% ../nbs/Compositing/02_interval_compositing.ipynb 11
86 | def composite_dataframe_set_dask(
87 | df: pd.DataFrame, # Pandas dataframe containing drillhole data
88 | holeid: str, # Column name containing the hole ids
89 | fro: str, # Column name containing the 'from' depths
90 | to: str, # Column name containing the 'to' depths
91 | interval: float = 1, # Numerical value to create interval lengths (default = 1m)
92 | cat_stats: list=['max_overlap', 'max_overlap_weight', 'unique_class_count', 'enclosed_class_count', 'min_overlap'], # list of categorical statistics to calculate in the composite
93 | num_stats: list=['weighted_average', 'max', 'min', 'range'], # list of numerical statistics to calculate in the composite
94 | columns_to_comp: list = None, # column names to include in the compositing
95 | ignore_cat_nans: bool=True, # True to ignore missing categorical data
96 | )->pd.DataFrame:
97 | """This function takes in a Pandas dataframe containing drillhole data and creates a composite dataframe
98 | by grouping the drillholes. It takes in the column names containing the hole ids, 'from' depths, and 'to'
99 | depths as parameters. It also takes in an interval length (default = 1m) and lists of categorical and
100 | numerical statistics to calculate for the composite. The columns to include in the compositing can also
101 | be specified, as well as whether to ignore missing categorical data or not (default = True). The function
102 | then applies the composite_drillhole_set function to each drillhole in parallel and returns a new composite dataframe."""
103 |
104 | # Create a Dask bag of the unique holes
105 | holes = db.from_sequence([hole for hole in df[holeid].unique()])
106 |
107 | # Map the composite_drillhole_set function to each hole in parallel
108 | compdfs = holes.map(lambda hole: composite_drillhole_set(
109 | df[df[holeid] == hole].reset_index(drop=True),
110 | holeid, fro, to, interval, cat_stats, num_stats, columns_to_comp, ignore_cat_nans))
111 |
112 | # Compute the results and concatenate the resulting dataframes to form the final output
113 | all_comp_df = pd.concat(compdfs.compute()).reset_index(drop=True)
114 |
115 | return all_comp_df
116 |
117 | # %% ../nbs/Compositing/02_interval_compositing.ipynb 13
118 | def composite_dataframe_set_mp(
119 | df: pd.DataFrame, # Pandas dataframe containing drillhole data
120 | holeid: str, # Column name containing the hole ids
121 | fro: str, # Column name containing the 'from' depths
122 | to: str, # Column name containing the 'to' depths
123 | interval: float = 1, # Numerical value to create interval lengths (default = 1m)
124 | cat_stats: list=['max_overlap', 'max_overlap_weight', 'unique_class_count', 'enclosed_class_count', 'min_overlap'], # list of categorical statistics to calculate in the composite
125 | num_stats: list=['weighted_average', 'max', 'min', 'range'], # list of numerical statistics to calculate in the composite
126 | columns_to_comp: list = None, # column names to include in the compositing
127 | ignore_cat_nans: bool=True, # True to ignore missing categorical data
128 | )->pd.DataFrame:
129 | """This function takes in a Pandas dataframe containing drillhole data and creates a composite dataframe
130 | by grouping the drillholes. It takes in the column names containing the hole ids, 'from' depths, and 'to'
131 | depths as parameters. It also takes in an interval length (default = 1m) and lists of categorical and
132 | numerical statistics to calculate for the composite. The columns to include in the compositing can also
133 | be specified, as well as whether to ignore missing categorical data or not (default = True). The function
134 | then applies the composite_drillhole_set function to each drillhole and returns a new composite dataframe."""
135 |
136 | def runit(hole):
137 | outdf = composite_drillhole_set(df[df[holeid] == hole].reset_index(drop=True), holeid, fro, to, interval, cat_stats, num_stats, columns_to_comp, ignore_cat_nans)
138 | return outdf
139 |
140 | num_processes = multiprocessing.cpu_count()
141 |
142 | # Get the unique holes
143 | holes = df[holeid].unique()
144 |
145 | # if __name__ == '__main__':
146 | with mp.Pool(processes=num_processes) as pool:
147 | results = pool.map(runit, holes)
148 |
149 | # Compute the results and concatenate the resulting dataframes to form the final output
150 | all_comp_df = pd.concat(results).reset_index(drop=True)
151 |
152 | return all_comp_df
153 |
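# Because composite_dataframe_set_mp spawns worker processes, callers on
# platforms that use the "spawn" start method (Windows, macOS) should invoke
# it from under a __main__ guard; a minimal sketch with an assumed df:
#
#   if __name__ == "__main__":
#       compdf = composite_dataframe_set_mp(df, "HOLEID", "FROM", "TO", interval=1)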
154 | # %% ../nbs/Compositing/02_interval_compositing.ipynb 15
155 | def BIG_composite_dataframe_set_dask(
156 | df: pd.DataFrame, # Pandas dataframe containing drillhole data
157 | holeid: str, # Column name containing the hole ids
158 | fro: str, # Column name containing the 'from' depths
159 | to: str, # Column name containing the 'to' depths
160 | interval: float = 1, # Numerical value to create interval lengths (default = 1m)
161 | cat_stats: list=['max_overlap', 'max_overlap_weight', 'unique_class_count', 'enclosed_class_count', 'min_overlap'], # list of categorical statistics to calculate in the composite
162 | num_stats: list=['weighted_average', 'max', 'min', 'range'], # list of numerical statistics to calculate in the composite
163 | columns_to_comp: list = None, # column names to include in the compositing
164 | ignore_cat_nans: bool=True, # True to ignore missing categorical data
165 | )->pd.DataFrame:
166 | """This function takes a dataframe containing downhole point data and composites it to a desired interval.
167 | The function returns a dataframe with composited point data for all holes. Using DASK, it splits the data into
168 | 1000 hole subsets"""
169 |
170 | # Group into 1000-hole chunks so it doesn't put too much overhead on dask
171 | grouped = df.groupby((df[holeid].rank(method='dense'))//1000)
172 |
173 | #Loop through and calculate for each group
174 | compdfs = []
175 | for _, group in grouped:
176 | # process each group here
177 | _df = composite_dataframe_set_dask(group, holeid, fro, to, interval, cat_stats, num_stats, columns_to_comp, ignore_cat_nans)
178 | compdfs.append(_df)
179 |
180 | #Concat to final
181 | dfout = pd.concat(compdfs)
182 |
183 | return dfout
184 |
185 | # %% ../nbs/Compositing/02_interval_compositing.ipynb 18
186 | def composite_drillhole_custom(
187 | df0: pd.DataFrame, # Dataframe containing the data to be composited
188 | dfi: pd.DataFrame, # Dataframe containing the from and to intervals to join the data to
189 | holeid: str, # Column name containing the holeids
190 | fro: str, # Column name containing the 'from' depths
191 | to: str, # Column name containing the 'to' depths
192 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
193 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
194 | columns_to_comp: list = None, # column names to include in the compositing
195 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
196 | )->pd.DataFrame:
197 |
198 | """This function creates a composite drillhole from a dataframe containing the data to be composited
199 | (df0) and a dataframe containing the from and to intervals to join the data to (dfi). It takes in the
200 | column names containing the holeids, 'from' depths, and 'to' depths as parameters. It also takes in
201 | two lists of statistics - one for categorical statistics and one for numerical statistics - that will
202 | be calculated in the composite. Additionally, it takes in an optional parameter for column names to
203 | include in the compositing, as well as an optional boolean parameter for ignoring missing categorical data.
204 | The function first fixes missing intervals of data and fills cats in df0, then adds missing intervals
205 | for interval dataframe dfi. It then checks if there are incorrect from-to intervals in either df0 or dfi,
206 | and prints out an error message if so. Finally, it calculates composites and appends per hole before
207 | returning a composite drillhole DataFrame (compdf)."""
208 |
209 | # Fix missing intervals of data and fill cats
210 | df0_temp = add_missing_intervals(df0, holeid, fro, to).reset_index(drop=True)
211 | df0_temp = fill_cats(df0_temp)
212 |
213 | # Add missing intervals for interval dataframe
214 | dfi_temp = add_missing_intervals(dfi, holeid, fro, to).reset_index(drop=True)
215 |
216 | hole = df0_temp[holeid].unique()[0]
217 | if len(df0_temp[df0_temp[fro].shift(-1) < df0_temp[to]]) >= 1:
218 | print(hole + " has overlapping from-to intervals in the joining data")
219 |
220 | if len(dfi_temp[dfi_temp[fro].shift(-1) < dfi_temp[to]]) >= 1:
221 | print(hole + " has overlapping from-to intervals in the interval data")
222 |
223 | if len(dfi_temp[dfi_temp[fro] > dfi_temp[to]]) >= 1:
224 | print(hole + " has inverted from-to intervals in the interval data")
225 |
226 |
227 | # Calculate composites and append per hole
228 | df0_temp = create_composite_intervals_from_df(df0_temp, dfi_temp, holeid, fro, to)
229 | edf = calculate_interval_weights_from_df(df0_temp, holeid, fro, to)
230 | compdf = composite_data(edf, holeid, fro, to, cat_stats, num_stats, columns_to_comp, ignore_cat_nans)
231 |
232 |
233 | return compdf
234 |
235 | # %% ../nbs/Compositing/02_interval_compositing.ipynb 22
236 | def composite_dataframe_custom(
237 | df0: pd.DataFrame, # Dataframe containing the data to be composited
238 | dfi: pd.DataFrame, # Dataframe containing the from and to intervals to join the data to
239 | holeid: str, # Column name containing the holeids
240 | fro: str, # Column name containing the 'from' depths
241 | to: str, # Column name containing the 'to' depths
242 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
243 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
244 | columns_to_comp: list = None, # column names to include in the compositing
245 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
246 | combine_existing_data: bool=True # Combine with data from the custom dataframe
247 | )->pd.DataFrame:
248 |
249 | """This function creates a composite dataframe from two dataframes. It takes in two dataframes (df0 and dfi),
250 | the column names containing the holeids, 'from' depths, and 'to' depths, as well as lists of categorical
251 | and numerical statistics to calculate in the composite. It also takes in a parameter for columns to
252 | include in the compositing, whether to ignore missing categorical data, and whether to combine with
253 | existing data from the custom dataframe. The function loops through each unique hole and checks if it
254 | exists in both dataframes. If it does, it sets both new dataframes and calls another function
255 | (composite_drillhole_custom) to create a composite drillhole. The composite drillholes are concatenated
256 | together to form a final df which can be merged with existing data from the custom dataframe if desired. """
257 |
258 | # Loop through each unique hole
259 | compdfs = []
260 | for hole in dfi[holeid].unique():
261 |
262 | # See if unique hole exists in both dataframes
263 | if hole not in list(df0[holeid].unique()):
264 | print(hole + " does not have any intervals to join to.")
265 | continue
266 |
267 | # Set both new dataframes
268 | df0_temp = df0[df0[holeid] == hole].reset_index(drop=True)
269 | dfi_temp = dfi[dfi[holeid] == hole].reset_index(drop=True)
270 |
271 | compdf = composite_drillhole_custom(df0_temp, dfi_temp, holeid, fro, to, cat_stats, num_stats, columns_to_comp, ignore_cat_nans)
272 |
273 | compdfs.append(compdf)
274 |
275 | # Concatenate to form final df
276 | all_comp_df = pd.concat(compdfs)
277 |
278 | if combine_existing_data:
279 | all_comp_df = pd.merge(dfi, all_comp_df, on=[holeid, fro, to])
280 |
281 | return all_comp_df
282 |
283 | # %% ../nbs/Compositing/02_interval_compositing.ipynb 26
284 | def composite_dataframe_custom_dask(
285 | df0: pd.DataFrame, # Dataframe containing the data to be composited
286 | dfi: pd.DataFrame, # Dataframe containing the from and to intervals to join the data to
287 | holeid: str, # Column name containing the holeids
288 | fro: str, # Column name containing the 'from' depths
289 | to: str, # Column name containing the 'to' depths
290 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
291 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
292 | columns_to_comp: list = None, # column names to include in the compositing
293 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
294 | combine_existing_data: bool=True # Combine with data from the custom dataframe
295 | )->pd.DataFrame:
296 |
297 | """This function creates a composite dataframe from two dataframes. It takes in two dataframes (df0 and dfi),
298 | the column names containing the holeids, 'from' depths, and 'to' depths, as well as lists of categorical
299 | and numerical statistics to calculate in the composite. It also takes in a parameter for columns to
300 | include in the compositing, whether to ignore missing categorical data, and whether to combine with
301 | existing data from the custom dataframe. The function loops through each unique hole and checks if it
302 | exists in both dataframes. If it does, it sets both new dataframes and calls another function
303 | (composite_drillhole_custom) to create a composite drillhole. The composite drillholes are concatenated
304 | together to form a final df which can be merged with existing data from the custom dataframe if desired. """
305 |
306 | print(f'The following holes do not have intervals to join to: {[hole for hole in dfi[holeid].unique() if hole not in df0[holeid].unique()]}')
307 |
308 | # Create a Dask bag of the unique holes
309 | holes = db.from_sequence([hole for hole in dfi[holeid].unique() if hole in df0[holeid].unique()])
310 |
311 | # Map the composite_drillhole_custom function to each hole in parallel
312 | compdfs = holes.map(lambda hole: composite_drillhole_custom(
313 | df0[df0[holeid] == hole].reset_index(drop=True),
314 | dfi[dfi[holeid] == hole].reset_index(drop=True),
315 | holeid, fro, to, cat_stats, num_stats, columns_to_comp, ignore_cat_nans))
316 |
317 | # Compute the results and concatenate the resulting dataframes to form the final output
318 | all_comp_df = pd.concat(compdfs.compute())
319 |
320 | if combine_existing_data:
321 | all_comp_df = pd.merge(dfi, all_comp_df, on=[holeid, fro, to])
322 |
323 | return all_comp_df
324 |
325 | # %% ../nbs/Compositing/02_interval_compositing.ipynb 28
326 | def composite_dataframe_custom_mp(
327 | df0: pd.DataFrame, # Dataframe containing the data to be composited
328 | dfi: pd.DataFrame, # Dataframe containing the from and to intervals to join the data to
329 | holeid: str, # Column name containing the holeids
330 | fro: str, # Column name containing the 'from' depths
331 | to: str, # Column name containing the 'to' depths
332 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
333 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
334 | columns_to_comp: list = None, # column names to include in the compositing
335 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
336 | combine_existing_data: bool=True # Combine with data from the custom dataframe
337 | )->pd.DataFrame:
338 | """This function takes in a Pandas dataframe containing drillhole data and creates a composite dataframe
339 | by grouping the drillholes. It takes in the column names containing the hole ids, 'from' depths, and 'to'
340 | depths as parameters. It also takes in an interval length (default = 1m) and lists of categorical and
341 | numerical statistics to calculate for the composite. The columns to include in the compositing can also
342 | be specified, as well as whether to ignore missing categorical data or not (default = True). The function
343 | then applies a composited_rillhole_set function to each drillhole and returns a new composite dataframe."""
344 |
345 |
346 | def runit(hole):
347 | outdf = composite_drillhole_custom(
348 | df0[df0[holeid] == hole].reset_index(drop=True),
349 | dfi[dfi[holeid] == hole].reset_index(drop=True),
350 | holeid, fro, to, cat_stats, num_stats, columns_to_comp, ignore_cat_nans)
351 | return outdf
352 |
353 | num_processes = multiprocessing.cpu_count()
354 |
355 | # Get the unique holes that exist in both dataframes
356 | holes = [hole for hole in dfi[holeid].unique() if hole in df0[holeid].unique()]
357 |
358 | # if __name__ == '__main__':
359 | # st.write('yes')
360 |
361 | with mp.Pool(processes=num_processes) as pool:
362 | results = pool.map(runit, holes)
363 | # else:
364 | # import streamlit as st
365 | # st.write('no main')
366 | # st.write(__name__)
367 |
368 | # Compute the results and concatenate the resulting dataframes to form the final output
369 | all_comp_df = pd.concat(results).reset_index(drop=True)
370 |
371 | if combine_existing_data:
372 | all_comp_df = pd.merge(dfi, all_comp_df, on=[holeid, fro, to])
373 |
374 | return all_comp_df
375 |
376 | # %% ../nbs/Compositing/02_interval_compositing.ipynb 30
377 | def BIG_composite_dataframe_custom_dask(
378 | df0: pd.DataFrame, # Dataframe containing the data to be composited
379 | dfi: pd.DataFrame, # Dataframe containing the from and to intervals to join the data to
380 | holeid: str, # Column name containing the holeids
381 | fro: str, # Column name containing the 'from' depths
382 | to: str, # Column name containing the 'to' depths
383 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
384 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
385 | columns_to_comp: list = None, # column names to include in the compositing
386 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
387 | combine_existing_data: bool=True # Combine with data from the custom dataframe
388 | )->pd.DataFrame:
389 | """This function takes a dataframe containing downhole point data and composites it to a desired interval.
390 | The function returns a dataframe with composited point data for all holes. Using DASK, it splits the data into
391 | 1000 hole subsets"""
392 |
393 | # Group into 1000-hole chunks so it doesn't put too much overhead on dask
394 | grouped = dfi.groupby((dfi[holeid].rank(method='dense'))//1000)
395 |
396 | #Loop through and calculate for each group
397 | compdfs = []
398 | for _, group in grouped:
399 |
400 | group0 = df0[df0[holeid].isin(group[holeid].unique())].reset_index(drop=True)
401 |
402 | # process each group here (group holds the dfi subset, group0 the matching df0 subset)
403 | _df = composite_dataframe_custom_dask(group0, group, holeid, fro, to, cat_stats, num_stats, columns_to_comp, ignore_cat_nans, combine_existing_data)
404 | compdfs.append(_df)
405 |
406 | #Concat to final
407 | dfout = pd.concat(compdfs)
408 |
409 | return dfout
410 |
--------------------------------------------------------------------------------
/point_compositing.py:
--------------------------------------------------------------------------------
1 | # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/Compositing/03_point_compositing.ipynb.
2 |
3 | # %% auto 0
4 | __all__ = ['composite_drillhole_point_set', 'composite_point_dataframe_set', 'composite_point_dataframe_set_dask',
5 | 'composite_point_dataframe_set_mp', 'BIG_composite_point_dataframe_set_dask',
6 | 'composite_drillhole_point_custom', 'composite_dataframe_point_custom',
7 | 'composite_dataframe_point_custom_dask', 'composite_dataframe_point_custom_mp',
8 | 'BIG_composite_dataframe_point_custom_dask']
9 |
10 | # %% ../nbs/Compositing/03_point_compositing.ipynb 4
11 | import warnings
12 | warnings.filterwarnings("ignore")
13 |
14 | import pandas as pd
15 | import numpy as np
16 | import math
17 | import itertools
18 | # import dask.bag as db  # NOTE: the *_dask functions below reference `db` and need this re-enabled
19 | # import dask
20 | # import dask.dataframe as dd
21 | # from dask.distributed import Client, LocalCluster
22 | import multiprocessing
23 | import multiprocess as mp
24 |
25 | from drillhole_utils import *
26 | from compositing_utils import *
27 |
28 | # %% ../nbs/Compositing/03_point_compositing.ipynb 5
29 | def composite_drillhole_point_set(
30 | df: pd.DataFrame, # Pandas dataframe containing single drillhole data
31 | holeid: str, # Column name containing the holeid
32 | depth: str, # Column name containing the point depth values
33 | interval: float, # interval to composite points to
34 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
35 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
36 | columns_to_comp: list = None, # column names to include in the compositing
37 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
38 | )->pd.DataFrame:
39 | """This function takes a Pandas dataframe containing drillhole data, a column name containing the holeid,
40 | a column name containing the 'from' depths, an interval to composite points to, and two lists of statistics
41 | (categorical and numerical) to calculate in the composite. It then rounds, cuts depth and fills categories.
42 | It creates a list of columns to include in the compositing and checks if there is only one interval.
43 | If there is more than one interval it calculates composites for each interval and concatenates them
44 | into one dataframe. If there is only one interval it groups by that interval and calculates composites.
45 | It assigns new from and to columns, adds missing intervals if points don't occur in every interval,
46 | reorders columns and returns the dataframe."""
47 |
48 | #Round, cut depth and fill categories
49 | df = round_depth(df, depth)
50 | try:
51 | df = cut_depth_dataframe_into_intervals(df, depth, interval)
52 | except Exception:
53 | print(f'No depth data for {df[holeid].unique()[0]}')
54 | return pd.DataFrame()
55 |
56 | df = fill_cats(df)
57 |
58 | if not columns_to_comp:
59 | columns_to_comp = list(df.columns)
60 |
61 | columns_to_comp = [col for col in columns_to_comp if col not in [holeid, depth, 'Depth_Interval', 'Weights']]
62 |
63 | # Check if there's only one interval
64 | if len(df["Depth_Interval"].unique()) > 1:
65 |
66 | # Creates list to remove unique columns from being comp'd
67 | unicols = [col for col in list(df[columns_to_comp].columns[df[columns_to_comp].nunique() <= 1])]
68 | multicols = [col for col in columns_to_comp if col not in unicols]
69 |
70 | compdfs = []
71 | for bin in sorted(df['Depth_Interval'].unique()):
72 | testdf = df[df['Depth_Interval']==bin][multicols+[depth, 'Depth_Interval']]
73 | compdftemp = calculate_point_composites(testdf, depth, cat_stats, num_stats, multicols, ignore_cat_nans)
74 | compdfs.append(compdftemp)
75 |
76 | compdf = pd.concat(compdfs).reset_index(drop=True)
77 | compdf['Depth_Interval'] = sorted(df['Depth_Interval'].unique())
78 | else:
79 | compdf = df[columns_to_comp+['Depth_Interval']].groupby(["Depth_Interval"]).apply(calculate_point_composites, depth, cat_stats, num_stats, columns_to_comp, ignore_cat_nans).reset_index()
80 |
81 | # Assign new from and to columns
82 | compdf['FROM'] = [np.round(x, 2) for x in [float(x.replace("(", "").replace("]", "").split(",")[0]) for x in compdf["Depth_Interval"].astype(str)]]
83 | compdf['TO'] = [np.round(x, 2) for x in [float(x.replace("(", "").replace("]", "").split(",")[1]) for x in compdf["Depth_Interval"].astype(str)]]
84 |
85 |
86 | # Add missing intervals if points don't occur in every interval
87 | froto = generate_fromto(df, depth, interval=interval)
88 | compdf = pd.merge(froto, compdf, on=['FROM', 'TO'], how='left')
89 |
90 | # Reorder columns and return df
91 | start_cols = [holeid, 'FROM', 'TO']
92 | compdf[holeid] = df[holeid].iloc[0]
93 | compdf = compdf[start_cols + [col for col in compdf.columns if col not in start_cols]].sort_values('FROM')
94 |
95 | if "level_1" in compdf.columns:
96 | compdf = compdf.drop("level_1", axis=1)
97 |
98 | if "Depth_Interval" in compdf.columns:
99 | compdf = compdf.drop("Depth_Interval", axis=1)
100 |
101 | return compdf
102 |
103 | # %% ../nbs/Compositing/03_point_compositing.ipynb 7
104 | def composite_point_dataframe_set(
105 | df: pd.DataFrame, # Dataframe containing downhole point data to composite
106 | holeid: str, # the column name containing the hole id
107 | depth: str, # The column name containing the point depth values
108 | interval: float = 1, # Desired interval to composite to
109 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
110 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
111 | columns_to_comp: list = None, # column names to include in the compositing
112 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
113 | ) -> pd.DataFrame:
114 | """This function takes a dataframe containing downhole point data and composites it to a desired interval.
115 | The function returns a dataframe with composited point data for all holes."""
116 |
117 | # Grouping by holeid and performing composite
118 | outdf = df.groupby(holeid).apply(composite_drillhole_point_set, holeid, depth, interval, cat_stats, num_stats, columns_to_comp, ignore_cat_nans).reset_index(drop=True)
119 |
120 | if "level_1" in outdf.columns:
121 | outdf = outdf.drop(["level_1"], axis=1)
122 |
123 | return outdf
124 |
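# A hedged usage sketch: composite the example specific-gravity point data to
# 5 m intervals. The column names passed here are assumptions and should
# match the actual CSV headers.
#
#   import pandas as pd
#   sg = pd.read_csv("Example_Data/Galore_Creek/GCMC_2019_SpecificGravity.csv")
#   sg_comp = composite_point_dataframe_set(sg, holeid="HOLEID", depth="DEPTH", interval=5)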
125 | # %% ../nbs/Compositing/03_point_compositing.ipynb 9
126 | def composite_point_dataframe_set_dask(
127 | df: pd.DataFrame, # Dataframe containing downhole point data to composite
128 | holeid: str, # the column name containing the hole id
129 | depth: str, # The column name containing the point depth values
130 | interval: float = 1, # Desired interval to composite to
131 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
132 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
133 | columns_to_comp: list = None, # column names to include in the compositing
134 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
135 | ) -> pd.DataFrame:
136 | """This function takes a dataframe containing downhole point data and composites it to a desired interval.
137 | The function returns a dataframe with composited point data for all holes. Using DASK"""
138 |
139 | # Create a Dask bag of the unique holes
140 | holes = db.from_sequence([hole for hole in df[holeid].unique()])
141 |
142 | # Map the composite_drillhole_custom function to each hole in parallel
143 | compdfs = holes.map(lambda hole: composite_drillhole_point_set(
144 | df[df[holeid] == hole],
145 | holeid, depth, interval, cat_stats, num_stats, columns_to_comp, ignore_cat_nans))
146 |
147 | # Compute the results and concatenate the resulting dataframes to form the final output
148 | all_comp_df = pd.concat(compdfs.compute())
149 |
150 | return all_comp_df
151 |
152 | # %% ../nbs/Compositing/03_point_compositing.ipynb 11
153 | def composite_point_dataframe_set_mp(
154 | df: pd.DataFrame, # Dataframe containing downhole point data to composite
155 | holeid: str, # the column name containing the hole id
156 | depth: str, # The column name containing the point depth values
157 | interval: float = 1, # Desired interval to composite to
158 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
159 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
160 | columns_to_comp: list = None, # column names to include in the compositing
161 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
162 | ) -> pd.DataFrame:
163 | """This function takes a dataframe containing downhole point data and composites it to a desired interval.
164 | The function returns a dataframe with composited point data for all holes, using a multiprocessing pool."""
165 |
166 |
167 | def runit(hole):
168 | outdf = composite_drillhole_point_set(
169 | df[df[holeid] == hole],
170 | holeid, depth, interval, cat_stats, num_stats, columns_to_comp, ignore_cat_nans)
171 | return outdf
172 |
173 | num_processes = multiprocessing.cpu_count()
174 |
175 | # Get the unique holes
176 | holes = df[holeid].unique()
177 |
178 | # if __name__ == '__main__':
179 | with mp.Pool(processes=num_processes) as pool:
180 | results = pool.map(runit, holes)
181 |
182 | # Compute the results and concatenate the resulting dataframes to form the final output
183 | all_comp_df = pd.concat(results).reset_index(drop=True)
184 |
185 |
186 | return all_comp_df
187 |
188 | # %% ../nbs/Compositing/03_point_compositing.ipynb 13
189 | def BIG_composite_point_dataframe_set_dask(
190 | df: pd.DataFrame, # Dataframe containing downhole point data to composite
191 | holeid: str, # the column name containing the hole id
192 | depth: str, # The column name containing the point depth values
193 | interval: float = 1, # Desired interval to composite to
194 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
195 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
196 | columns_to_comp: list = None, # column names to include in the compositing
197 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
198 | ) -> pd.DataFrame:
199 | """This function takes a dataframe containing downhole point data and composites it to a desired interval.
200 | The function returns a dataframe with composited point data for all holes. Using DASK, it splits the data into
201 | 1000 hole subsets"""
202 |
203 | # Group into 1000-hole chunks so it doesn't put too much overhead on dask
204 | grouped = df.groupby((df[holeid].rank(method='dense'))//1000)
205 |
206 | #Loop through and calculate for each group
207 | compdfs = []
208 | for _, group in grouped:
209 | # process each group here
210 | _df = composite_point_dataframe_set_dask(group, holeid, depth, interval, cat_stats, num_stats, columns_to_comp, ignore_cat_nans)
211 | compdfs.append(_df)
212 |
213 | #Concat to final
214 | dfout = pd.concat(compdfs)
215 |
216 | return dfout
217 |
218 | # %% ../nbs/Compositing/03_point_compositing.ipynb 16
219 | def composite_drillhole_point_custom(
220 | df0: pd.DataFrame, # Pandas dataframe containing drillhole data
221 | dfi: pd.DataFrame, # dataframe containing interval information
222 | holeid: str, # Column name containing the holeid
223 | depth: str, # Column name containing the point depth values
224 | fro: str, # The column name containing the interval from values
225 | to: str, # The column name containing the interval to values
226 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
227 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
228 | columns_to_comp: list = None, # column names to include in the compositing
229 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
230 | )->pd.DataFrame:
231 | """This function uses the calculate_point_composites function to composite the data across an entire drillhole.
232 | It takes in a Pandas dataframe containing drillhole data, a dataframe containing interval information,
233 | and column names for holeid, depth, and columns to include in the compositing. It also takes in two lists
234 | of statistics to calculate in the composite: one for categorical stats and one for numerical stats.
235 | It then creates a list to remove unique columns from being comp'd and creates a composite dataframe
236 | for each interval. If there is only one interval, it groups by that interval instead. The function then
237 | assigns new from and to columns, adds missing intervals if points don't occur in every interval,
238 | reorders columns, and returns the dataframe."""
239 |
240 | #Fill cats and create point intervals
241 | df = df0.copy()
242 | df = fill_cats(df)
243 | df = create_point_intervals_from_df(df, dfi, holeid, depth, fro, to)
244 |
245 | if not columns_to_comp:
246 | columns_to_comp = list(df.columns)
247 |
248 | columns_to_comp = [col for col in columns_to_comp if col not in [holeid, depth, 'Depth_Interval', 'Weights']]
249 |
250 | # Check if there's only one interval
251 | if len(df["Depth_Interval"].unique()) > 1:
252 |
253 | # Creates list to remove unique columns from being comp'd
254 | unicols = [col for col in list(df[columns_to_comp].columns[df[columns_to_comp].nunique() <= 1])]
255 | multicols = [col for col in columns_to_comp if col not in unicols]
256 |
257 | compdfs = []
258 | for depth_bin in sorted(df['Depth_Interval'].unique()):
259 | testdf = df[df['Depth_Interval'] == depth_bin][multicols + [depth, 'Depth_Interval']]
260 | compdftemp = calculate_point_composites(testdf, depth, cat_stats, num_stats, multicols, ignore_cat_nans)
261 | compdfs.append(compdftemp)
262 |
263 | compdf = pd.concat(compdfs).reset_index(drop=True)
264 | compdf['Depth_Interval'] = sorted(df['Depth_Interval'].unique())
265 | else:
266 | compdf = df[columns_to_comp+[depth, 'Depth_Interval']].groupby(["Depth_Interval"]).apply(calculate_point_composites, depth, cat_stats, num_stats, columns_to_comp, ignore_cat_nans).reset_index()
267 |
268 | # Assign new from and to columns
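# e.g. a Depth_Interval of "(10.0, 12.5]" parses to FROM = 10.0 and TO = 12.5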
269 | compdf['FROM'] = [np.round(x, 2) for x in [float(x.replace("(", "").replace("]", "").split(",")[0]) for x in compdf["Depth_Interval"].astype(str)]]
270 | compdf['TO'] = [np.round(x, 2) for x in [float(x.replace("(", "").replace("]", "").split(",")[1]) for x in compdf["Depth_Interval"].astype(str)]]
271 |
272 |
273 | # Add missing intervals if points don't occur in every interval
274 | compdf = pd.merge(dfi[['FROM', 'TO']], compdf, on=['FROM', 'TO'], how='left')
275 |
276 | # Reorder columns and return df
277 | start_cols = [holeid, 'FROM', 'TO']
278 | compdf[holeid] = df[holeid].iloc[0]
279 | compdf = compdf[start_cols + [col for col in compdf.columns if col not in start_cols]].sort_values('FROM')
280 |
281 | if "level_1" in compdf.columns:
282 | compdf = compdf.drop("level_1", axis=1)
283 |
284 | if "Depth_Interval" in compdf.columns:
285 | compdf = compdf.drop("Depth_Interval", axis=1)
286 |
287 | return compdf
288 |
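# Illustrative usage sketch (commented out; not part of the exported module). The data and
# column names below are hypothetical:
#
# points = pd.DataFrame({"HOLEID": ["DH001"] * 4, "DEPTH": [0.5, 1.5, 2.5, 3.5],
#                        "Cu_ppm": [100.0, 150.0, 120.0, 90.0],
#                        "Lith": ["BAS", "BAS", "AND", "AND"]})
# intervals = pd.DataFrame({"HOLEID": ["DH001", "DH001"], "FROM": [0.0, 2.0], "TO": [2.0, 4.0]})
# comp = composite_drillhole_point_custom(points, intervals, "HOLEID", "DEPTH", "FROM", "TO")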
289 | # %% ../nbs/Compositing/03_point_compositing.ipynb 20
290 | def composite_dataframe_point_custom(
291 | df0: pd.DataFrame, # Pandas dataframe containing drillhole data
292 | dfi: pd.DataFrame, # dataframe containing interval information
293 | holeid: str, # the column name containing the hole id
294 | depth: str, # The column name containing the point depth values
295 | fro: str, # The column name containing the interval from values
296 | to: str, # The column name containing the interval to values
297 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
298 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
299 | columns_to_comp: list = None, # list of column names to include in the compositing
300 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
301 | combine_existing_data: bool=True # Combine with data from the custom dataframe
302 | )->pd.DataFrame:
303 |
304 | """This function takes in two pandas dataframes (df0 and dfi), a holeid, depth, fro, and to column names,
305 | a list of categorical statistics to calculate in the composite (catstats), a list of numerical statistics
306 | to calculate in the composite (numstats), an optional columns_to_comp parameter, an ignore_cat_nans boolean
307 | parameter, and a combine_existing_data boolean parameter. It then loops through each unique hole in the dfi
308 | dataframe and checks if it exists in both dataframes. If it does exist in both dataframes, it adds missing
309 | intervals for interval dataframe and calculates composites. Finally, it concatenates all composites into
310 | one final dataframe and returns it. If combine_existing_data is set to True, then it merges the dfi dataframe
311 | with the all composites dataframe on the holeid, fro, and to columns before returning it."""
312 |
313 | # Loop through each unique hole
314 | compdfs = []
315 | for hole in dfi[holeid].unique():
316 | #print(hole)
317 |
318 | # See if unique hole exists in both dataframes
319 | if hole not in list(df0[holeid].unique()):
320 | print(hole + " does not have any intervals to join to.")
321 | continue
322 |
323 | # Set both new dataframes
324 | df0_temp = df0[df0[holeid] == hole].reset_index(drop=True)
325 | dfi_temp = dfi[dfi[holeid] == hole].reset_index(drop=True)
326 |
327 | # Add missing intervals for interval dataframe
328 | dfi_temp = add_missing_intervals(dfi_temp, holeid, fro, to).reset_index(drop=True)
329 |
330 | # Calculate composites and append per hole
331 | #edf = create_point_intervals_from_df(df0_temp, dfi_temp, holeid, depth, fro, to)
332 | compdf = composite_drillhole_point_custom(df0_temp, dfi_temp, holeid, depth, fro, to, cat_stats, num_stats, columns_to_comp, ignore_cat_nans)
333 | compdfs.append(compdf)
334 |
335 | # Concatenate to form final df
336 | all_comp_df = pd.concat(compdfs).reset_index(drop=True)
337 |
338 | if combine_existing_data:
339 | all_comp_df = pd.merge(dfi, all_comp_df, on=[holeid, fro, to])
340 |
341 | return all_comp_df
342 |
343 | # %% ../nbs/Compositing/03_point_compositing.ipynb 23
344 | def composite_dataframe_point_custom_dask(
345 | df0: pd.DataFrame, # Pandas dataframe containing drillhole data
346 | dfi: pd.DataFrame, # dataframe containing interval information
347 | holeid: str, # the column name containing the hole id
348 | depth: str, # The column name containing the point depth values
349 | fro: str, # The column name containing the interval from values
350 | to: str, # The column name containing the interval to values
351 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
352 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
353 | columns_to_comp: list = None, # list of column names to include in the compositing
354 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
355 | combine_existing_data: bool=True # Combine with data from the custom dataframe
356 | )->pd.DataFrame:
357 | """This function takes in two pandas dataframes (df0 and dfi), a holeid, depth, fro, and to column names,
358 | a list of categorical statistics to calculate in the composite (catstats), a list of numerical statistics
359 | to calculate in the composite (numstats), an optional columns_to_comp parameter, an ignore_cat_nans boolean
360 | parameter, and a combine_existing_data boolean parameter. It then loops through each unique hole in the dfi
361 | dataframe and checks if it exists in both dataframes. If it does exist in both dataframes, it adds missing
362 | intervals for interval dataframe and calculates composites. Finally, it concatenates all composites into
363 | one final dataframe and returns it. If combine_existing_data is set to True, then it merges the dfi dataframe
364 | with the all composites dataframe on the holeid, fro, and to columns before returning it."""
365 |
366 | print(f'The following holes do not have intervals to join to: {[hole for hole in dfi[holeid].unique() if hole not in df0[holeid].unique()]}')
367 |
368 | # Create a Dask bag of the unique holes
369 | holes = db.from_sequence([hole for hole in dfi[holeid].unique() if hole in df0[holeid].unique()])
370 |
371 | # Map the composite_drillhole_custom function to each hole in parallel
372 | compdfs = holes.map(lambda hole: composite_drillhole_point_custom(
373 | df0[df0[holeid] == hole].reset_index(drop=True),
374 | dfi[dfi[holeid] == hole].reset_index(drop=True),
375 | holeid, depth, fro, to, cat_stats, num_stats, columns_to_comp, ignore_cat_nans))
376 |
377 | # Compute the results and concatenate the resulting dataframes to form the final output
378 | all_comp_df = pd.concat(compdfs.compute())
379 |
380 | if combine_existing_data:
381 | all_comp_df = pd.merge(dfi, all_comp_df, on=[holeid, fro, to])
382 |
383 | return all_comp_df
384 |
385 |
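# The parallelism above follows the standard dask.bag pattern, roughly:
#   import dask.bag as db
#   results = db.from_sequence(items).map(per_item_function).compute()
# Each hole is composited independently, so the work parallelises cleanly.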
386 | # %% ../nbs/Compositing/03_point_compositing.ipynb 25
387 | def composite_dataframe_point_custom_mp(
388 | df0: pd.DataFrame, # Pandas dataframe containing drillhole data
389 | dfi: pd.DataFrame, # dataframe containing interval information
390 | holeid: str, # the column name containing the hole id
391 | depth: str, # The column name containing the point depth values
392 | fro: str, # The column name containing the interval from values
393 | to: str, # The column name containing the interval to values
394 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
395 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
396 | columns_to_comp: list = None, # list of column names to include in the compositing
397 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
398 | combine_existing_data: bool=True # Combine with data from the custom dataframe
399 | )->pd.DataFrame:
400 | """This function takes in two pandas dataframes (df0 and dfi), a holeid, depth, fro, and to column names,
401 | a list of categorical statistics to calculate in the composite (catstats), a list of numerical statistics
402 | to calculate in the composite (numstats), an optional columns_to_comp parameter, an ignore_cat_nans boolean
403 | parameter, and a combine_existing_data boolean parameter. It then loops through each unique hole in the dfi
404 | dataframe and checks if it exists in both dataframes. If it does exist in both dataframes, it adds missing
405 | intervals for interval dataframe and calculates composites. Finally, it concatenates all composites into
406 | one final dataframe and returns it. If combine_existing_data is set to True, then it merges the dfi dataframe
407 | with the all composites dataframe on the holeid, fro, and to columns before returning it."""
408 |
409 | print(f'The following holes do not have intervals to join to: {[hole for hole in dfi[holeid].unique() if hole not in df0[holeid].unique()]}')
410 |
411 | holes = [hole for hole in dfi[holeid].unique() if hole in df0[holeid].unique()]
412 |
413 | def runit(hole):
414 | outdf = composite_drillhole_point_custom(
415 | df0[df0[holeid] == hole].reset_index(drop=True),
416 | dfi[dfi[holeid] == hole].reset_index(drop=True),
417 | holeid, depth, fro, to, cat_stats, num_stats, columns_to_comp, ignore_cat_nans)
418 | return outdf
419 |
420 | num_processes = multiprocessing.cpu_count()
421 |
422 | #if __name__ == '__main__':
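# NOTE: runit is a local closure; the standard library multiprocessing cannot pickle local
# functions, so this relies on mp providing a serializer that can (e.g. the dill-based
# "multiprocess" package). With plain multiprocessing, runit would need to be module-level.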
423 | with mp.Pool(processes=num_processes) as pool:
424 | results = pool.map(runit, holes)
425 |
426 | # Compute the results and concatenate the resulting dataframes to form the final output
427 | all_comp_df = pd.concat(results).reset_index(drop=True)
428 |
429 | if combine_existing_data:
430 | all_comp_df = pd.merge(dfi, all_comp_df, on=[holeid, fro, to])
431 |
432 | return all_comp_df
433 |
434 |
435 | # %% ../nbs/Compositing/03_point_compositing.ipynb 27
436 | def BIG_composite_dataframe_point_custom_dask(
437 | df0: pd.DataFrame, # Pandas dataframe containing drillhole data
438 | dfi: pd.DataFrame, # dataframe containing interval information
439 | holeid: str, # the column name containing the hole id
440 | depth: str, # The column name containing the point depth values
441 | fro: str, # The column name containing the interval from values
442 | to: str, # The column name containing the interval to values
443 | cat_stats: list = ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], # list of categorical statistics to calculate in the composite
444 | num_stats: list = ["weighted_average", "max", "min", "range"], # list of numerical statistics to calculate in the composite
445 | columns_to_comp: list = None, # list of column names to include in the compositing
446 | ignore_cat_nans: bool = True, # True to ignore missing categorical data
447 | combine_existing_data: bool=True # Combine with data from the custom dataframe
448 | )->pd.DataFrame:
449 | """This function takes a dataframe containing downhole point data and composites it to a desired interval.
450 | The function returns a dataframe with composited point data for all holes. Using DASK, it splits the data into
451 | 1000 hole subsets"""
452 |
453 | # Group by 1000 holes so it doesn't put too much overhead on Dask
454 | grouped = df0.groupby((df0[holeid].rank(method='dense'))//1000)
455 |
456 | #Loop through and calculate for each group
457 | compdfs = []
458 | for _, group in grouped:
459 |
460 | igroup = dfi[dfi[holeid].isin(group[holeid].unique())].reset_index(drop=True)
461 | # process each group here
462 | _df = composite_dataframe_point_custom_dask(group, igroup, holeid, depth, fro, to, cat_stats, num_stats, columns_to_comp, ignore_cat_nans, combine_existing_data)
463 | compdfs.append(_df)
464 |
465 | #Concat to final
466 | dfout = pd.concat(compdfs)
467 |
468 | return dfout
469 |
--------------------------------------------------------------------------------
/DR_DC_APP/pages/02_⚖️_Compositing.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 |
3 | st.set_page_config(layout="wide")
4 |
5 | import warnings
6 |
7 | warnings.filterwarnings("ignore")
8 |
9 | from functools import reduce
10 | import pandas as pd
11 | from pandas.api.types import infer_dtype
12 | from PIL import Image
13 | import numpy as np
14 |
15 | import sys
16 |
17 | sys.path.append(r"./")
18 |
19 | from compositing_utils import *
20 | from interval_compositing import *
21 | from point_compositing import *
22 | from st_utils import *
23 |
24 |
25 | # for key in st.session_state.keys():
26 |
27 | # st.write(key)
28 | # st.write(st.session_state[key])
29 |
30 | customized_button = st.markdown(
31 | """
32 | """,
47 | unsafe_allow_html=True,
48 | )
49 |
50 | def get_best_match(
51 | list_of_options:list, # a list of strings to search through
52 | string:str, # the string to find the best match for
53 | )->str:
54 |
55 | if len(get_close_matches_icase(string, list_of_options)) > 0:
56 | best_match = str(list_of_options[(list_of_options).index(get_close_matches_icase(string, list_of_options)[0])])
57 | else:
58 | best_match = str(list_of_options[0])
59 |
60 | return best_match
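# e.g. assuming get_close_matches_icase wraps difflib case-insensitively,
# get_best_match(["HoleID", "From_m", "To_m"], "HOLE") returns "HoleID";
# if nothing is close enough, the first option is returned as a fallback.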
61 |
62 | def rerun_page():
63 | st.rerun()
64 |
65 |
66 | add_logo()
67 |
68 | st.sidebar.markdown("*This web application proudly brought to you by **[Datarock](https://www.datarock.com.au)***")
69 |
70 | col1, col2 = st.columns([4, 1])
71 |
72 | if col2.button("Reset Page"):
73 | # Clear values from *all* memoized functions:
74 | st.session_state.clear()
75 | st.rerun()
76 |
77 | col1.markdown("# ⚖️ Compositing")
78 | st.markdown(
79 | """
80 | This section of the app composites or joins data to a selected interval.
81 | Having all available downhole datasets at standard intervals allows analysis of the relationships between them. Compositing is achieved by following these steps.
82 |
83 |
84 | ###### 1. Selecting Interval Option to Composite/Join To
85 | - This will be the interval of the final dataset. It can either be an interval set from an existing dataset (i.e. lithology or assay data) or a selected uniform interval (1m, 2m, 0.5m, etc.)
86 |
87 | ###### 2. Select Data to Composite (Join)
88 | - This selects the data you wish to composite to the interval from step 1. Multiple csv files are accepted as inputs and data can either be in interval format or in point format.
89 |
90 | ###### 3. Select Information for Each Table
91 | - Here the user selects which column names contain the HoleID, From and To (for interval data) or Depth (for point data) and which columns they would like to composite.
92 |
93 | ###### 4. Select Composite Statistics
94 | - Here the user selects which statistics will be calculated for the categorical and numerical data being composited.
95 |
96 |
97 | Once all these parameters have been selected and set, the user can download the composited data using the download link.
98 |
99 | _NOTE: the application assumes that the data is clean; there is currently no in-app functionality to clean data (i.e. if you have -999 NaN values they will be included in calculations)_.
100 | """
101 | )
102 | ######################################
103 | # Selecting Interval to join to
104 |
105 | st.markdown("#### 1. Selecting Interval to Composite/Join To")
106 | st.markdown(
107 | "There are two options to choose from when Compositing data, you can either composite to an existing interval (such as lithology or assay intervals from an existing table) or you can create your own standard interval across the dataset (eg 1m)"
108 | )
109 |
110 | image = Image.open("DR_DC_APP/Images/Join_Option.jpg")
111 | st.image(image, caption="Figure 1. An example of the two interval options to join to.")
112 |
113 | des_int = st.radio("Select Interval Option", ("Standard Interval", "Existing Interval"))
114 |
115 | if des_int == "Existing Interval":
116 | interval_file = st.file_uploader("Choose a csv file with interval data")
117 |
118 | # Stop if file not selected
119 | if not interval_file:
120 | st.stop()
121 |
122 | if "int_info" not in st.session_state:
123 | st.session_state["int_info"] = []
124 |
125 | # Read in file and show dataframe
126 | try:
127 | dfi = pd.read_csv(interval_file)
128 | except UnicodeDecodeError:
129 | dfi = pd.read_csv(interval_file, encoding="cp1252")
130 | st.dataframe(dfi.head())
131 |
132 | st.markdown("Select the HoleID, From and To columns from the existing interval table")
133 | form_name = 'Select_Interval_Data'
134 | with st.form(form_name, clear_on_submit=False):
135 | container = st.container()
136 |
137 | if f'session_state_besthole_i' not in st.session_state:
138 | st.session_state[f'session_state_besthole_i'] = get_best_match(list(dfi.columns), 'HOLE')
139 | if f'session_state_bestfro_i' not in st.session_state:
140 | st.session_state[f'session_state_bestfro_i'] = get_best_match(list(dfi.columns), 'FROM')
141 | if f'session_state_bestto_i' not in st.session_state:
142 | st.session_state[f'session_state_bestto_i'] = get_best_match(list(dfi.columns), 'TO')
143 | if f'Inty_Check' not in st.session_state:
144 | st.session_state['Inty_Check'] = 0
145 |
146 |
147 | iHOLEID = st.selectbox("Select Interval HOLEID", list(dfi.columns), index=list(dfi.columns).index(st.session_state[f'session_state_besthole_i']))
148 | iFROM = st.selectbox("Select Interval From Depth", list(dfi.columns), index=list(dfi.columns).index(st.session_state[f'session_state_bestfro_i']))
149 | iTO = st.selectbox("Select Interval To Depth", list(dfi.columns), index=list(dfi.columns).index(st.session_state[f'session_state_bestto_i']))
150 |
151 | # If button selected
152 | if st.form_submit_button("Submit", on_click=rerun_page):
153 | st.session_state[f'session_state_besthole_i'] = iHOLEID
154 | st.session_state[f'session_state_bestfro_i'] = iFROM
155 | st.session_state[f'session_state_bestto_i'] = iTO
156 | st.session_state['Inty_Check'] = 1
157 |
158 | st.write("Submitted!")
159 |
160 | # Renaming columns for holeid, from and to
161 | dfi = dfi.rename(columns={st.session_state[f'session_state_besthole_i']: "HOLEID", st.session_state[f'session_state_bestfro_i']: "FROM", st.session_state[f'session_state_bestto_i']: "TO"})
162 |
163 | if st.session_state["Inty_Check"] == 0:
164 | st.stop()
165 |
166 | # If standard interval get interval length
167 | elif des_int == "Standard Interval":
168 | st.markdown("Select the interval you wish to have the data composited to (m):")
169 |
170 | if 'session_state_interval2comp' not in st.session_state:
171 | st.session_state['session_state_interval2comp'] = 0
172 |
173 | form_name = 'Select_Interval'
174 | with st.form(form_name, clear_on_submit=False):
175 | container = st.container()
176 | interval = container.text_input("Insert an interval", value=str(st.session_state['session_state_interval2comp']))
177 | interval = float(interval)
178 |
179 | # If button selected
180 | if st.form_submit_button("Submit", on_click=rerun_page):
181 | st.session_state['session_state_interval2comp'] = interval
182 | st.write("Submitted!")
183 |
184 | if st.session_state["session_state_interval2comp"] == 0:
185 | st.stop()
186 |
187 |
188 | ###############################
189 | # LOADING IN DATA
190 | st.markdown("#### 2. Select Data to Composite (Join)")
191 | st.markdown("Select which drillhole data you wish to be composited or joined to the interval data selected above. You can select multiple .csv files")
192 |
193 | # Read in data to composite and select a certain hole to run example on
194 | comp_file = st.file_uploader("Choose a File to Composite", accept_multiple_files=True)
195 |
196 | if not comp_file:
197 | st.stop()
198 |
199 | # df = pd.read_csv(comp_file)
200 | try:
201 | dfs = [[comp_fil.name, pd.read_csv(comp_fil)] for comp_fil in comp_file]
202 | except UnicodeDecodeError:
203 | dfs = [[comp_fil.name, pd.read_csv(comp_fil, encoding="cp1252")] for comp_fil in comp_file]
204 |
205 | #####################################################
206 | # RENAMING AND STORING DATA
207 | st.markdown("#### 3. Select Information for Each Table")
208 | st.markdown("For each uploaded dataset, select the columns containing the Hole ID, From and To values. You also select which columns you want to include in the compositing.")
209 |
210 | # Create a column object based on number of dataframes
211 | x = st.columns(len(dfs))
212 |
213 | # Loop through each column and rename data
214 | dfs2 = []
215 | for idx, df in enumerate(dfs):
216 |
217 | # Set dataframe and determine column types; if a column mixes integers and strings, cast it entirely to string
218 | ndf = df[1]
219 | for col in ndf.columns:
220 | if infer_dtype(ndf[col]) == "mixed-integer":
221 | ndf[col] = ndf[col].astype(str)
222 |
223 | if f'session_state_dfname_{idx}' not in st.session_state:
224 | st.session_state[f'session_state_dfname_{idx}'] = df[0]
225 | if f'session_state_df_{idx}' not in st.session_state:
226 | st.session_state[f'session_state_df_{idx}'] = 0
227 | if f'session_state_depthtype_{idx}' not in st.session_state:
228 | st.session_state[f'session_state_depthtype_{idx}'] = 'Interval'
229 | if f'session_state_besthole_{idx}' not in st.session_state:
230 | st.session_state[f'session_state_besthole_{idx}'] = get_best_match(list(ndf.columns), 'HOLE')
231 | if f'session_state_bestfro_{idx}' not in st.session_state:
232 | st.session_state[f'session_state_bestfro_{idx}'] = get_best_match(list(ndf.columns), 'FROM')
233 | if f'session_state_bestto_{idx}' not in st.session_state:
234 | st.session_state[f'session_state_bestto_{idx}'] = get_best_match(list(ndf.columns), 'TO')
235 | if f'session_state_bestdepth_{idx}' not in st.session_state:
236 | st.session_state[f'session_state_bestdepth_{idx}'] = get_best_match(list(ndf.columns), 'DEPTH')
237 | if f'session_state_cols2select_{idx}' not in st.session_state:
238 | st.session_state[f'session_state_cols2select_{idx}'] = []
239 | if f'Inty_Checker_comp{idx}' not in st.session_state:
240 | st.session_state[f'Inty_Checker_comp{idx}'] = 0
241 |
242 | with x[idx]:
243 |
244 |
245 | # Write each dataframe title and head
246 | st.markdown(f"""__{df[0]}__""".replace(".csv", ""), unsafe_allow_html=True)
247 | st.dataframe(ndf.head(3))
248 |
249 | # Select whether data is interval or point type
250 | depth_type_list = ["Interval", "Point"]
251 |
252 | depth_type = st.radio("Depth Type", depth_type_list, horizontal=True, index = list(depth_type_list).index(st.session_state[f'session_state_depthtype_{idx}']), key=f"radio_check_{idx}")
253 |
254 |
255 |
256 | container = st.container()
257 |
258 | # For interval data
259 | if depth_type == "Interval":
260 |
261 | HOLEID = container.selectbox("Select HOLEID", list(ndf.columns), index=list(ndf.columns).index(st.session_state[f'session_state_besthole_{idx}']))
262 | FROM = container.selectbox("Select From", list(ndf.columns), index=list(ndf.columns).index(st.session_state[f'session_state_bestfro_{idx}']))
263 | TO = container.selectbox("Select To", list(ndf.columns), index=list(ndf.columns).index(st.session_state[f'session_state_bestto_{idx}']))
264 |
265 | # Rename data
266 | ndf = ndf.rename(columns={HOLEID: "HOLEID", FROM: "FROM", TO: "TO"})
267 |
268 | # For point data
269 | elif depth_type == "Point":
270 |
271 | HOLEID = container.selectbox("Select HOLEID", list(ndf.columns), index=list(ndf.columns).index(st.session_state[f'session_state_besthole_{idx}']))
272 | DEPTH = container.selectbox("Select DEPTH", list(ndf.columns), index=list(ndf.columns).index(st.session_state[f'session_state_bestdepth_{idx}']))
273 |
274 | # Rename columns
275 | ndf = ndf.rename(columns={HOLEID: "HOLEID", DEPTH: "DEPTH"})
276 |
277 | with st.form(f'Comp_Form_{idx}', clear_on_submit=False):
278 |
279 |
280 | cols_to_select = [col for col in ndf.columns if col not in ["HOLEID", "FROM", "TO", "DEPTH"]]
281 | select_all_toggle = st.checkbox("Select all", key=f"tdata_all_check_{idx}")
282 |
283 | if select_all_toggle:
284 | comp_selected_columns = st.multiselect("Select Data to Composite:", options=cols_to_select, default=cols_to_select)
285 | else:
286 | comp_selected_columns = st.multiselect("Select Data to Composite:", options=cols_to_select, default=st.session_state[f'session_state_cols2select_{idx}'])
287 |
288 | def update_sesh():
289 | st.session_state[f'session_state_cols2select_{idx}'] = comp_selected_columns
290 | st.session_state[f'session_state_dfname_{idx}'] = df[0]
291 | st.session_state[f'session_state_df_{idx}'] = ndf
292 | st.session_state[f'session_state_besthole_{idx}'] = HOLEID
293 | st.session_state[f'session_state_depthtype_{idx}'] = depth_type
294 | # st.session_state[f'Select_Button_{idx}'] = True
295 |
296 | if depth_type == "Interval":
297 | st.session_state[f'session_state_bestfro_{idx}'] = FROM
298 | st.session_state[f'session_state_bestto_{idx}'] = TO
299 | elif depth_type == "Point":
300 | st.session_state[f'session_state_bestdepth_{idx}'] = DEPTH
301 |
302 | if st.form_submit_button('Submit', on_click=update_sesh):
303 | st.session_state[f'session_state_cols2select_{idx}'] = comp_selected_columns
304 | st.session_state[f'Inty_Checker_comp{idx}'] = 1
305 | st.write('Submitted')
306 |
307 | if st.session_state[f'Inty_Checker_comp{idx}']==0:
308 | st.stop()
309 |
310 |
311 |
312 |
313 | ##################################################
314 | # Selecting Compositing Options
315 | st.markdown("#### 4. Select Compositing Statistics")
316 | st.markdown(
317 | """Select the statistics you wish to calculate for the categorical and numerical composites.
318 | For categorical data, the options to calculate include the Maximum Overlap, Maximum Overlap Weight, Unique Class Count, Enclosed Class Count and Minimum Overlap.
319 | For numerical data, the options to calculate include the Weighted Average, Maximum Value, Minimum Value and Range of Values.
320 | The image below provides a visual representation of these options."""
321 | )
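# Worked example of "weighted_average" (weights are interval overlap lengths): compositing
# intervals 0-1 m (Cu = 2.0) and 1-3 m (Cu = 5.0) onto a single 0-3 m interval gives
# (2.0 * 1 + 5.0 * 2) / 3 = 4.0.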
322 |
323 | image = Image.open("DR_DC_APP/Images/Compositing_Example.jpg")
324 | st.image(image, caption="Figure 2. Examples of the various statistics to calculate.")
325 |
326 | stat_options = {
327 | "max_overlap": "Maximum Overlap",
328 | "max_overlap_weight": "Maximum Overlap Weight",
329 | "unique_class_count": "Unique Class Count",
330 | "enclosed_class_count": "Enclosed Class Count",
331 | "min_overlap": "Minimum Overlap",
332 | "weighted_average": "Weighted Average",
333 | "max": "Maximum Value",
334 | "min": "Minimum Value",
335 | "range": "Range of Values",
336 | }
337 |
338 | if 'session_state_cats' not in st.session_state:
339 | st.session_state['session_state_cats'] = ['max_overlap']
340 | if 'session_state_nums' not in st.session_state:
341 | st.session_state['session_state_nums'] = ['weighted_average']
342 | if 'composite_switch' not in st.session_state:
343 | st.session_state['composite_switch'] = 0
344 |
345 | # Make form
346 | with st.form('Selecting_Comp_Types', clear_on_submit=False):
347 | c1, c2 = st.columns(2)
348 |
349 | # Select list items widget
350 | with c1:
351 | # Set up container with option to select all holes
352 | container = st.container()
353 | select_all_toggle = st.checkbox("Select all", key="mineral_all_check")
354 | if select_all_toggle:
355 | selected_cats = container.multiselect('Select Categorical Composite Statistics:', ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], format_func=lambda x: stat_options.get(x))
356 | else:
357 | selected_cats = container.multiselect('Select Categorical Composite Statistics:', ["max_overlap", "max_overlap_weight", "unique_class_count", "enclosed_class_count", "min_overlap"], default=st.session_state['session_state_cats'], format_func=lambda x: stat_options.get(x))
358 |
359 | with c2:
360 | # Set up container with option to select all holes
361 | container = st.container()
362 | select_all_toggle = st.checkbox("Select all", key="feature_all_check")
363 | if select_all_toggle:
364 | selected_nums = container.multiselect('Select Numerical Composite Statistics:', ["weighted_average", "max", "min", "range"], ["weighted_average", "max", "min", "range"], format_func=lambda x: stat_options.get(x))
365 | else:
366 | selected_nums = container.multiselect('Select Numerical Composite Statistics:', ["weighted_average", "max", "min", "range"], default=st.session_state['session_state_nums'], format_func=lambda x: stat_options.get(x))
367 |
368 | # If button selected
369 | if st.form_submit_button("Submit"):
370 | st.session_state['session_state_cats'] = selected_cats
371 | st.session_state['session_state_nums'] = selected_nums
372 | st.write("Submitted!")
373 | st.session_state['composite_switch'] = 1
374 | st.rerun()
375 |
376 | if st.session_state['composite_switch'] == 0:
377 | st.stop()
378 |
379 | #######################################
380 | # Compositing data
381 |
382 |
383 | # Get names
384 | namelist = [comp_file[x].name for x in range(len(dfs))]
385 |
386 | dfnameslist = [st.session_state[f'session_state_dfname_{x}'] for x in range(len(dfs))]
387 | dflist = [st.session_state[f'session_state_df_{x}'] for x in range(len(dfs))]
388 | depthtypelist = [st.session_state[f'session_state_depthtype_{x}'] for x in range(len(dfs))]
389 | comperslist = [st.session_state[f'session_state_cols2select_{x}'] for x in range(len(dfs))]
390 |
391 | catstats_to_calc = st.session_state['session_state_cats']
392 | numstats_to_calc = st.session_state['session_state_nums']
393 |
394 | nan_option = False
395 | dfs2 = []
396 | for idx in range(len(dfs)):
397 | dfs2.append([dfnameslist[idx], dflist[idx], depthtypelist[idx], comperslist[idx]])
398 |
399 | if len(dfs2) < len(dfs):
400 | st.write("Something Wrong")
401 | st.stop()
402 |
403 | # for x in range(len(dfs)):
404 | # if f'session_state_dfname_{idx}' in st.session_state:
405 | # del st.session_state[f'session_state_dfname_{x}']
406 | # if f'session_state_df_{idx}' in st.session_state:
407 | # del st.session_state[f'session_state_df_{x}']
408 | # if f'session_state_depthtype_{idx}' in st.session_state:
409 | # del st.session_state[f'session_state_depthtype_{x}']
410 | # if f'session_state_cols2select_{idx}' in st.session_state:
411 | # del st.session_state[f'session_state_cols2select_{x}']
412 | # if 'composite_switch' in st.session_state:
413 | # del st.session_state['composite_switch']
414 | # del dfs
415 |
416 | if des_int == "Existing Interval":
417 |
418 | # Loop through dataframes
419 | compdfs = []
420 | for idx, df0 in enumerate(dfs2):
421 |
422 | st.write(f"#### {namelist[idx]}".replace(".csv", ""))
423 |
424 | # Check holes have correct data associated
425 | check_streamlit_holes(df0[1], dfi)
426 |
427 | with st.spinner("Compositing Data"):
428 |
429 | if df0[2] == "Interval":
430 |
431 | # Function for compositing
432 | @st.cache(show_spinner=False)
433 | def compodfo_custom():
434 | composite_df = composite_dataframe_custom_mp(
435 | df0 = df0[1],
436 | dfi = dfi,
437 | holeid = "HOLEID",
438 | fro = "FROM",
439 | to = "TO",
440 | cat_stats=catstats_to_calc,
441 | num_stats=numstats_to_calc,
442 | columns_to_comp=df0[3],
443 | ignore_cat_nans=nan_option,
444 | combine_existing_data=False
445 | )
446 |
447 | if "Overlapping_Bins" in composite_df.columns:
448 | composite_df = composite_df.drop(["Overlapping_Bins"], axis=1)
449 | return composite_df
450 |
451 | # Run for each dataframe
452 | temp_comp = compodfo_custom()
453 | compdfs.append(temp_comp)
454 |
455 | elif df0[2] == "Point":
456 |
457 | @st.cache(show_spinner=False)
458 | def compodfo_point_custom():
459 | composite_df = composite_dataframe_point_custom_mp(
460 | df0[1], dfi, "HOLEID", "DEPTH", "FROM", "TO", cat_stats=catstats_to_calc, num_stats=numstats_to_calc, columns_to_comp=df0[3], ignore_cat_nans=nan_option
461 | )
462 | if "Overlapping_Bins" in composite_df.columns:
463 | composite_df = composite_df.drop(["Overlapping_Bins"], axis=1)
464 |
465 | return composite_df
466 |
467 | temp_comp = compodfo_point_custom()
468 | compdfs.append(temp_comp)
469 |
470 | # Merge all
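# reduce applies pd.merge pairwise, i.e. merge(merge(compdfs[0], compdfs[1]), compdfs[2]) ...,
# outer-joining every composited table on HOLEID/FROM/TO.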
471 | cdf = reduce(lambda df1, df2: pd.merge(df1, df2, on=["HOLEID", "FROM", "TO"], how="outer"), compdfs)
472 | cdf = pd.merge(dfi, cdf, how="left", on=["HOLEID", "FROM", "TO"])
473 |
474 |
475 | elif des_int == "Standard Interval":
476 |
477 | compdfs = []
478 | for idx, df0 in enumerate(dfs2):
479 |
480 | st.write(f"##### Running - {namelist[idx]}".replace(".csv", ""))
481 | ndf = df0[1]
482 |
483 | for col in ndf.columns:
484 | if infer_dtype(ndf[col]) == "mixed-integer":
485 | ndf[col] = ndf[col].astype(str)
486 |
487 | with st.spinner("Compositing Data"):
488 |
489 | if df0[2] == "Interval":
490 |
491 | if not (np.issubdtype(ndf['FROM'], np.number) and np.issubdtype(ndf['TO'], np.number)):
492 | st.error('ERROR: From and To selection are not numeric')
493 |
494 | @st.cache(show_spinner=False)
495 | def compodfo():
496 | composite_df = composite_dataframe_set_mp(
497 | ndf,
498 | "HOLEID",
499 | "FROM",
500 | "TO",
501 | interval=interval,
502 | cat_stats=catstats_to_calc,
503 | num_stats=numstats_to_calc,
504 | columns_to_comp=df0[3],
505 | ignore_cat_nans=nan_option
506 | )
507 | return composite_df
508 |
509 | composite_df = compodfo()
510 | if "Overlapping_Bins" in composite_df.columns:
511 | composite_df = composite_df.drop(["Overlapping_Bins"], axis=1)
512 | compdfs.append(composite_df)
513 | # x[idx].dataframe(composite_df)
514 |
515 | elif df0[2] == "Point":
516 | if not np.issubdtype(ndf['DEPTH'], np.number):
517 | st.error('ERROR: Depth selection is not numeric')
518 |
519 | @st.cache(show_spinner=False)
520 | def compodfo_point():
521 | composite_df = composite_point_dataframe_set_mp(
522 | ndf,
523 | "HOLEID",
524 | "DEPTH",
525 | interval=interval,
526 | cat_stats=catstats_to_calc,
527 | num_stats=numstats_to_calc,
528 | columns_to_comp=df0[3],
529 | ignore_cat_nans=nan_option
530 | )
531 | if "Overlapping_Bins" in composite_df.columns:
532 | composite_df = composite_df.drop(["Overlapping_Bins"], axis=1)
533 |
534 | return composite_df
535 |
536 | composite_df = compodfo_point()
537 | compdfs.append(composite_df)
538 | # x[idx].dataframe(composite_df)
539 |
540 | cdf = reduce(lambda df1, df2: pd.merge(df1, df2, on=["HOLEID", "FROM", "TO"], how="outer"), compdfs)
541 |
542 | # st.dataframe(cdf)
543 |
544 | ########################################
545 | # Download
546 |
547 | title_alignment = """
548 |
553 | """
554 |
555 | # _, col2, _ = st.columns([2.75, 3, 2])
556 | # col2.markdown("### Download Composited Dataframe")
557 | text = " Download Composited Dataframe"
558 | st.markdown(f"### {text}
", unsafe_allow_html=True)
559 |
560 |
561 | @st.cache_data(max_entries=1)
562 | def convert_df(df):
563 | # IMPORTANT: Cache the conversion to prevent computation on every rerun
564 | return df.to_csv(index=False).encode("utf-8")
565 |
566 | def stop_comp():
567 | st.session_state['composite_switch'] = 0
568 |
569 |
570 | _, col2, _ = st.columns([3.2, 3, 2])
571 | csv = convert_df(cdf)
572 | st.download_button(
573 | label="Download Composited Data",
574 | data=csv,
575 | file_name="composited_data.csv",
576 | mime="text/csv",
577 | on_click=stop_comp
578 | )
579 | ########################################
580 | # Plotting check to make sure composite ran correctly (issues with numeric points)
581 |
582 | st.markdown("# Plotting Check")
583 |
584 | # Get dataframe names
585 | selected_df_name = st.selectbox("Select Dataframe to Plot:", [name.replace(".csv", "") for name in namelist])
586 | selected_df_name = selected_df_name + ".csv"
587 |
588 | for df_info in dfs2:
589 | if (df_info[0] == selected_df_name) and (df_info[2] == "Point"):
590 | st.write("### Plotting Point Interval Data not currently supported")
591 | st.stop()
592 |
593 |
594 | # Select drillhole and data
595 | selected_drillhole = st.selectbox("Select Drillholes to Plot:", cdf["HOLEID"].unique())
596 |
597 | odf = [x[1] for x in dfs2 if selected_df_name in x[0]][0]
598 | plot_names = [x[3] for x in dfs2 if selected_df_name in x[0]][0]
599 |
600 | # try:
601 | # i = 0
602 | # while i < len(plot_names):
603 | # try:
604 | selected_columns = st.multiselect("Select Downhole Data to Plot:", plot_names, plot_names[0])
605 |
606 | if len(selected_columns) == 0:
607 | st.stop()
608 |
609 | # Get info from dataframe
610 | odf = odf[odf["HOLEID"] == selected_drillhole].sort_values("FROM").reset_index(drop=True)
611 | odf = odf[["HOLEID", "FROM", "TO"] + selected_columns]
612 |
613 | holedf = cdf[cdf["HOLEID"] == selected_drillhole].sort_values("FROM").reset_index(drop=True)
614 |
615 | # Plotting the extra values in order
616 | categoricals = []
617 | numericals = []
618 | for selected_extra in selected_columns:
619 | if not np.issubdtype(odf[selected_extra].dtype, np.number):
620 | odf.loc[odf[selected_extra].isnull(),selected_extra] = 'None'
621 | odf.loc[odf[selected_extra]=='nan', selected_extra] = 'None'
622 |
623 | holedf.loc[holedf[selected_extra+'_max_overlap'].isnull(), selected_extra+'_max_overlap'] = 'None'
624 | holedf.loc[holedf[selected_extra+'_max_overlap']=='nan', selected_extra+'_max_overlap'] = 'None'
625 | categoricals.append(selected_extra)
626 | else:
627 | odf.loc[odf[selected_extra].isnull(),selected_extra] = np.nan
628 | # holedf.loc[holedf[selected_extra].isnull(),'selected_extra'] = np.nan
629 | numericals.append(selected_extra)
630 |
631 | if "FROM" not in odf.columns:
632 | odf["FROM"] = odf["DEPTH"]
633 | odf["TO"] = odf["DEPTH"].shift(1)
634 |
635 |
636 |
637 | # Plot comparison
638 | fig = composite_comparison_plot(
639 | odf.sort_values("FROM").reset_index(drop=True),
640 | holedf,
641 | categoricals,
642 | numericals,
643 | "FROM",
644 | "TO",
645 | )
646 | names = set()
647 | fig.for_each_trace(lambda trace: trace.update(showlegend=False) if (trace.name in names) else names.add(trace.name))
648 |
649 | st.plotly_chart(fig, use_container_width=True, theme=None)
650 | # except:
651 | # st.write("### Unable to plot variable")
652 |
653 | # except:
654 | # st.write("### Plotting not available for this dataset")
655 |
656 |
657 | # with st.form("Hole Selection", clear_on_submit=False):
658 | # if st.form_submit_button("Submit Composited Data"):
659 | # st.session_state["composite_df"] = cdf
660 | # st.write("Submitted!")
661 |
--------------------------------------------------------------------------------