Commit 919d290a authored by Alexander Martínez Méndez's avatar Alexander Martínez Méndez

Add initial files

parent 3ad01915
.ipynb_checkpoints/amplitude-checkpoint.png (30.6 KiB)

%% Cell type:markdown id:a4d202eb-9d92-4857-b2af-aba670f9090b tags:
# Racimo Tormenta Data Analysis
%% Cell type:markdown id:52bc4c5f-1baf-4e8f-8a1d-0b0e51fa8695 tags:
Notebook for the data analysis of the Racimo Tormenta project.
%% Cell type:markdown id:d277a53c-cccc-4996-8944-620130575372 tags:
## Libraries
%% Cell type:markdown id:e96a3270-365f-4f90-9d5a-d2673f176f11 tags:
Import the libraries required for analyzing and interacting with the data.
%% Cell type:code id:5d4d6478-6b92-46a4-a849-7fc4d5a3b119 tags:
``` python
# Numerical and signal-processing libraries
import numpy as np
import scipy
from scipy import stats
from scipy.fftpack import fftfreq, irfft, rfft
import math

# Plotting
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib.colors import ListedColormap, LinearSegmentedColormap
import matplotlib.dates as md

# System and time utilities
import sys
import os
import datetime
import time

%matplotlib inline
sys.getdefaultencoding()
```
%% Output
'utf-8'
%% Cell type:markdown id:960fdf13-2b85-4fca-a1fb-ed0aa016e468 tags:
## Detector calibration
%% Cell type:markdown id:3b8b509b-c69e-4c8d-98e2-6f9d7cc825c3 tags:
Calibration of the detector measurements.
%% Cell type:markdown id:440109c1-3272-4702-9a0c-fd5e2b8e6b1d tags:
### Preliminary view of the calibration data
%% Cell type:markdown id:4ed58223-c091-4de4-b9c7-2cf4b3dd674f tags:
Load the data
%% Cell type:code id:78a31578-66f3-4363-80b4-856aabede298 tags:
``` python
data = np.loadtxt('../Data/Lighting_2021_04_13_00_6.dat', comments='#')
```
%% Cell type:markdown id:7c03fdf2-6a4a-4d30-87e3-3b025a43f6c1 tags:
Preview the data array
%% Cell type:code id:c5141991-6dd0-42d6-a4bd-4cfe0a164bba tags:
``` python
data
```
%% Output
array([[0.00000e+00, 0.00000e+00],
       [1.00000e-05, 9.00000e+01],
       [2.00000e-05, 3.90000e+01],
       ...,
       [1.19995e+00, 3.30000e+01],
       [1.19996e+00, 5.30000e+01],
       [1.19997e+00, 1.80000e+01]])
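%% Cell type:markdown tags:
A quick sanity check of the loaded array (a minimal sketch; the column meanings, time in seconds and amplitude in ADC counts, are taken from the axis labels used later in the analysis):
%% Cell type:code tags:
``` python
# Column 0: time [s], column 1: amplitude [ADC] (as used in the plots below)
print('Samples: %d' % data.shape[0])
print('Sampling period: %.1e s' % (data[1, 0] - data[0, 0]))
print('Record length: %.2f s' % (data[-1, 0] - data[0, 0]))
print('Amplitude range: %d to %d ADC' % (data[:, 1].min(), data[:, 1].max()))
```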
%% Cell type:markdown id:c754f55c-22ff-46b9-a414-7fa07a4497d6 tags:
### Signal amplitude and frequency
%% Cell type:markdown id:badb8819-ef05-4e8c-867c-8b4a03f554d2 tags:
Function to plot the signal amplitude and its frequency spectrum.
%% Cell type:code id:05f9f0b6-dd57-4e7e-92ca-4718cf85808e tags:
``` python
def Lightning_Analysis(data, dt, Np):
    mean = np.mean(data[:,1])
    sigma = np.std(data[:,1])
    peaks = []
    MTFt = []   # Multiple-termination flash (MTF) relative times
    MTSt = []   # Multiple-termination stroke (MTS) relative times
    MTSv = []   # MTS counts per flash
    T_count = 0 # Terminations counter
    MTFc = 1    # MTF counter
    MTSc = 0    # MTS counter
    N = len(data)
    MTSw = 1e-3 # Time window for differentiating MTF and MTS events
    threshold = mean + 5*sigma # Peak threshold

    # Termination identification
    for i in range(N):
        if data[i,1] > threshold:
            T_count += 1
            peaks.append(i)
            t1 = data[i,0]
            if T_count > 1:
                Td = t1 - t0
                if Td > MTSw:
                    MTFt.append(Td)
                    MTFc += 1
                    MTSv.append(MTSc)
                    MTSc = 0
                else:
                    MTSt.append(Td)
                    MTSc += 1
            t0 = t1

    print(u'Terminations above 5\u03C3 = %d\n' % T_count)
    print(u'Number of strokes = %d\n' % MTFc)

    # Frequency spectrum of the full record
    s = data[:,1]
    Y = np.fft.fft(s)
    N = len(Y)/2 + 1
    fa = 1.0/dt
    X = np.linspace(0, fa/2, int(N), endpoint=True)
    sfft = np.abs(Y[:int(N)])
    print('Sample Time = %.5f s' % dt)
    print('Frequency = %.2f Hz' % fa)
    sfft = np.array(sfft)
    pos = int(np.where(sfft[1:-1] == np.amax(sfft[1:-1]))[0])
    frec_pico = 868.35  # X[pos+1]
    print("Maximum frequency = %.2f Hz" % frec_pico)

    if T_count >= Np:
        # Signal plot
        plt.figure(figsize=(16,4))
        plt.subplot(1,2,1)
        plt.plot(data[:,0], data[:,1])
        plt.axhline(threshold, color='red')
        plt.xlabel('Time [s]', fontsize=20)
        plt.ylabel('Amplitude [ADC]', fontsize=20)
        plt.savefig("amplitude.png", dpi=150)
        # Spectrum plot
        plt.subplot(1,2,2)
        plt.axvline(frec_pico, color='red')
        plt.loglog(X, sfft)
        plt.xlabel('Frequency [Hz]', fontsize=20)
        plt.axis([1e-1, 1e5, 1e1, 1e7])
        plt.grid()
        plt.show()

    return frec_pico, peaks, MTFt, MTSt, T_count, MTFc, MTSv
```
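%% Cell type:markdown tags:
The function above returns a hard-coded peak frequency (`frec_pico = 868.35`, with `X[pos+1]` left as a comment). As a minimal sketch, assuming the same sampling period `dt = 10e-6` s used in the call below, the dominant frequency can also be read directly from the spectrum:
%% Cell type:code tags:
``` python
# Standalone version of the spectral-peak search done inside Lightning_Analysis
dt = 10e-6                       # sampling period [s] (same value as in the next cell)
s = data[:, 1]
Y = np.fft.fft(s)
N = len(Y)//2 + 1                # keep only the positive-frequency half
X = np.linspace(0, 1.0/(2*dt), N, endpoint=True)
sfft = np.abs(Y[:N])
pos = np.argmax(sfft[1:-1])      # skip the DC component at index 0
print('Dominant frequency = %.2f Hz' % X[pos + 1])
```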
%% Cell type:markdown id:4da5b0b2-97c4-455b-b9f9-028e817f3fa9 tags:
Plot
%% Cell type:code id:66982705-0db4-43d7-8bd0-22ec2ae7b907 tags:
``` python
dt = 10e-6  # Sampling period [s]
Np = 0.     # Minimum number of peaks above 5 sigma required to plot the signal
# Returns the maximum peak frequency, peak positions, MTF/MTS times and counters
fp1, peaks1, MTFt, MTSt, pN1, MTFc, MTSv = Lightning_Analysis(data, dt, Np)
```
%% Output
Terminations above 5σ = 7
Number of strokes = 1
Sample Time = 0.00001 s
Frequency = 100000.00 Hz
Maximum frequency = 868.35 Hz
%% Cell type:code id:2c168963-a095-45b0-bcf1-e7943b3ba423 tags:
``` python
```
@@ -6,5 +6,5 @@ Data analysis repository for the Racimo Tormenta project.
 - Instrument calibration.
 - Data: [https://dataverse.redclara.net/dataverse/storm](https://dataverse.redclara.net/dataverse/storm)
-- Code: [https://gitmilab.redclara.net/mxrtinez1/analisis-de-datos](https://gitmilab.redclara.net/mxrtinez1/analisis-de-datos)
+- Code: [https://gitmilab.redclara.net/halleyUIS/limonet/analisis-de-datos](https://gitmilab.redclara.net/halleyUIS/limonet/analisis-de-datos)
 - Article: [https://www.overleaf.com/read/yhvybynwktjv](https://www.overleaf.com/read/yhvybynwktjv)
\ No newline at end of file
%% Cell type:markdown id:ff1fd295 tags:
# Script for automatically uploading LiMoNet data to a Dataverse repository
%% Cell type:markdown id:750b30d8 tags:
This script uploads data collected by LiMoNet (the Lightning Monitoring Network) to a Dataverse repository. It first loads the Python packages needed for the connection to Dataverse, then loads metadata from a **.json** file and searches for data files in a folder. Four functions are defined: **create_dataset**, **modify_metadata**, **load_metadata**, and **upload_data**. For more information, references are listed throughout the script.
Author: J. Peña-Rodríguez
2021
%% Cell type:code id:5402e405 tags:
``` python
# Dataverse client (https://github.com/IQSS/dataverse-client-python)
from dataverse import Connection
import dataverse
from lxml import etree

# Standard libraries
import numpy as np
import sys
import os
import json
import glob
import datetime

%matplotlib inline
sys.getdefaultencoding()
```
%% Cell type:code id:de9b2c88 tags:
``` python
def progressbar(it, prefix="", size=60, file=sys.stdout):
    # Progress bar animation
    count = len(it)
    def show(j):
        x = int(size*j/count)
        file.write("%s[%s%s] %i/%i\r" % (prefix, "#"*x, "."*(size-x), j, count))
        file.flush()
    show(0)
    for i, item in enumerate(it):
        yield item
        show(i+1)
    file.write("\n")
    file.flush()
```
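%% Cell type:markdown tags:
A short usage example of the generator-based progress bar above (a trivial illustration with `time.sleep` as a stand-in for real work; the actual use is inside `upload_data` below):
%% Cell type:code tags:
``` python
import time

# Iterate over 20 dummy items and animate the progress bar
for item in progressbar(range(20), "Processing: ", 40):
    time.sleep(0.05)  # stand-in for per-item work
```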
%% Cell type:code id:9e50bd8b tags:
``` python
def create_dataset(dataset_name):
    # Metadata
    # https://docs.python.org/3/library/xml.etree.elementtree.html
    # https://www.tutorialspoint.com/python3/python_xml_processing.htm
    # https://lxml.de/2.0/parsing.html
    # https://github.com/IQSS/dataverse-client-python
    description = 'This repository contains lightning data files recorded by LiMoNet at Bucaramanga, Colombia.'
    creator = 'Peña, Jesús'
    # Create the dataset inside the selected dataverse
    dataset_id = dataverse.Dataverse.create_dataset(dataverse_id, dataset_name, description, creator)
    return dataset_id
```
%% Cell type:markdown id:5c527720 tags:
The fields of the .json file use keywords that are documented here:
https://guides.dataverse.org/en/4.18.1/_downloads/dataset-create-new-all-default-fields.json
%% Cell type:code id:83d72718 tags:
``` python
def modify_metadata(dataset_name, date):
    # Modify the metadata file metadata_limonet.json
    # Modified metadata fields: title and dates
    # Any other field can be modified depending on your needs
    title = dataset_name
    with open("metadata_limonet.json", 'r') as f:
        json_data = json.load(f)
    json_data['metadataBlocks']['citation']['fields'][3]['value'][0]['dsDescriptionDate']['value'] = date
    json_data['metadataBlocks']['citation']['fields'][8]['value'] = date
    json_data['metadataBlocks']['citation']['fields'][0]['value'] = title
    with open('metadata_limonet.json', 'w') as f:
        json.dump(json_data, f, indent=2)
```
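%% Cell type:markdown tags:
`modify_metadata` relies on the position of each field inside `metadata_limonet.json`. The fragment below is a hypothetical sketch of that structure (shown as the dictionary returned by `json.load`); the `typeName` values and the field ordering are assumptions and must match the actual file for the hard-coded indices (`fields[0]`, `fields[3]`, `fields[8]`) to point at the title, the description date, and a second date field:
%% Cell type:code tags:
``` python
# Hypothetical fragment of metadata_limonet.json after json.load (assumed layout)
example_metadata = {
    "metadataBlocks": {
        "citation": {
            "fields": [
                # fields[0]: dataset title (plain string value)
                {"typeName": "title", "value": "user-LM_2021_08_01"},
                # ... fields[1] and fields[2] omitted here ...
                # fields[3]: description block, a list of dicts with a dsDescriptionDate
                {"typeName": "dsDescription",
                 "value": [{"dsDescriptionValue": {"value": "LiMoNet lightning data"},
                            "dsDescriptionDate": {"value": "2021-8-1"}}]},
                # ... remaining fields up to fields[8], a date field with a plain string value ...
            ]
        }
    }
}
```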
%% Cell type:code id:98f30841 tags:
``` python
def load_metadata(dataset_id):
    # Update the repository metadata from metadata_limonet.json
    with open("metadata_limonet.json") as metadata_file:
        # json.load returns the JSON object as a dictionary
        metadata = json.load(metadata_file)
    # Push the metadata to the dataset
    dataset_id.update_metadata(metadata)
```
%% Cell type:code id:3e2ffe24 tags:
``` python
def upload_data(dataset_id, day):
    # Upload data files
    # e.g. dataset_id.upload_filepath('Lightning/Lighting_2021_04_01_18_52.dat')
    files = sorted(glob.glob("Lightning/Lighting_" + day + "*.dat"))  # Sorted data files
    M = len(files)
    for i in progressbar(range(M), "Uploading: ", 50):
        dataset_id.upload_filepath(files[i])
    print('\nData uploaded\n')
```
%% Cell type:markdown id:16a19fd8 tags:
## Upload data
%% Cell type:markdown id:151fd3dd-8ef1-4f5c-89a3-ec116949cf36 tags:
Go to https://dataverse.redclara.net/dataverseuser.xhtml?selectTab=apiTokenTab and copy your user **Token**.
%% Cell type:code id:283b362e tags:
``` python
%env API_TOKEN=YOUR_USER_TOKEN
```
%% Cell type:code id:8d8bc25f tags:
``` python
API_TOKEN = os.environ['API_TOKEN']
host = 'dataverse.redclara.net'  # All clients >4.0 are supported
# Connect to the repository
connection = Connection(host, API_TOKEN)
# Select the dataverse to use
dataverse_id = connection.get_dataverse('ticec')  # Dataverse id
```
%% Cell type:code id:a437ef59 tags:
``` python
year = '2021'
month = '08'
now = datetime.datetime.now()
upload_date = ("%s-%s-%s" % (now.year, now.month, now.day))

for i in range(1, 30):
    day = str(i).zfill(2)
    file_date = ("%s_%s_%s" % (year, month, day))
    file_name = ("Lightning/Lighting_%s*.dat" % file_date)
    files = glob.glob(file_name)
    M = len(files)
    if files != []:  # Check that files exist for this day
        dataset_name = ("%s-LM_%s" % (os.environ['USER'], file_date))
        print("Files: %s  Dataset: %s  Date: %s" % (file_name, dataset_name, upload_date))
        dataset_id = create_dataset(dataset_name)
        modify_metadata(dataset_name, upload_date)
        load_metadata(dataset_id)
        upload_data(dataset_id, file_date)
```
amplitude.png (54.9 KiB)
