# -*- coding: utf-8 -*-
"""6338003821AdvComProgFinalProjectServer_v1.1.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1B0ihZ275Hw0V034W4eFtKc-wxwkg7iLS
"""

! pip install pymongo[srv]
! pip install flask_ngrok
! pip install flask_cors
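
# Note: the three installs above pull in the MongoDB driver (pymongo with SRV
# support), flask_ngrok (which exposes the Colab-hosted Flask app through a
# public ngrok URL, i.e. the hostnames used in the example request URLs below),
# and flask_cors for cross-origin requests.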

# Write all of your functions in this cell
from flask import Flask, request, flash, redirect, url_for, Response, jsonify
from flask_ngrok import run_with_ngrok
from flask_cors import CORS, cross_origin
import pymongo
import json
import os
from werkzeug.utils import secure_filename
import pandas as pd

UPLOAD_FOLDER = './'
ALLOWED_EXTENSIONS = {'txt', 'csv', 'json'}

app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
CORS(app, supports_credentials=True)
run_with_ngrok(app)

client = pymongo.MongoClient("mongodb+srv://colabtest:2hdgQn0vCCJoxhY6@cluster0.s3zvk.mongodb.net/?retryWrites=true&w=majority")
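
# Note on pymongo usage below: databases and collections are created lazily on
# first write, and attribute access is interchangeable with item access, so
# client.data.data2 refers to the same collection as client["data"]["data2"].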

def allowed_file(filename):
    # Accept only filenames whose extension is in ALLOWED_EXTENSIONS.
    return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS


@app.route('/')
def greeting():
    return "<H1>Greetings, traveller</H1>"

#http://7ab7-34-73-174-138.ngrok.io/insert?dbname=data&collectionname=testdata&web=com.burbn.instagram&cat=photos
@app.route('/insert')
def insert_db():
    # Insert one access record into the database/collection named in the query string.
    try:
        dbname = request.args.get("dbname")
        collectionname = request.args.get("collectionname")
        web = request.args.get("web")
        cat = request.args.get("cat")
        db = client[dbname]

        r = db[collectionname].insert_one({"accessor": {"identifier": web, "identifierType": "bundleID"},
                                           "category": cat,
                                           "identifier": "SELF-INSERT",
                                           "kind": "intervalBegin",
                                           "timeStamp": "2021-11-26T0:0:0.000+07:00",
                                           "type": "access"})
        return jsonify('Success')
    except Exception:
        print('Fail')
        return jsonify('Failure')
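
# A minimal usage sketch (assumes the server is already running and ngrok has
# printed a public URL; the hostname below is a placeholder, not a real tunnel):
#
#   import requests
#   resp = requests.get("http://<your-ngrok-host>.ngrok.io/insert",
#                       params={"dbname": "data", "collectionname": "testdata",
#                               "web": "com.burbn.instagram", "cat": "photos"})
#   print(resp.json())   # "Success" on insert, "Failure" otherwise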

#https://b443-34-125-209-50.ngrok.io/find?web=com.hammerandchisel.discord
@app.route('/find')
def find():
    web = request.args.get("web")
    collection_data = client.data.data2
    r = collection_data.find_one({"accessor": {"identifier": web, "identifierType": "bundleID"}})
    if r is None:
        res = {}
    else:
        res = {'web': r['accessor']['identifier'], 'category': r['category'], 'time': r['timeStamp'], 'type': r['type']}
        print(res)

    return jsonify(res)
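
# /find returns a single matching record flattened into
# {'web', 'category', 'time', 'type'}, or an empty JSON object when no
# document matches the given bundle identifier.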

#https://b443-34-125-209-50.ngrok.io/filter?cat=photos
@app.route('/filter')
def filter():
    #min=request.args.get("min")
    #max=request.args.get("max")
    #min = int(min)
    #max = int(max)
    cat = request.args.get("cat")
    ls = []
    collection_data = client.data.data2
    matches = list(collection_data.find({'category': cat}, {'_id': False}).sort([('timeStamp', pymongo.DESCENDING)]))
    test = collection_data.find_one({'category': cat}, {'_id': False})

    for match in matches:
        res = {'web': match['accessor']['identifier'], 'category': match['category'], 'time': match['timeStamp'], 'type': match['type']}
        ls.append(res)
    res = {'data': ls}
    if test is None:
        res = {'data': []}

    return jsonify(res)

@app.route('/alldata')
def get_data():
    # Flatten every record, then count accesses per (bundle identifier, category)
    # pair and return the counts (plus an 'all' total) as JSON.
    collection_data2 = client.data.data2
    dp = list(collection_data2.find().sort([('timeStamp', pymongo.DESCENDING)]))
    df = pd.json_normalize(dp)
    df.rename(columns={'accessor.identifier': 'iden'}, inplace=True)
    dff = pd.pivot_table(df, index='iden', columns='category', aggfunc='count')
    dfff = dff['_id']
    dffff = dfff
    dffff = dffff.fillna(0, downcast='infer')
    dffff['all'] = dffff['camera'] + dffff['kTCCServicePhotosAdd'] + dffff['location'] + dffff['mediaLibrary'] + dffff['microphone'] + dffff['photos']
    dffff.sort_values(by=['all'], inplace=True, ascending=False)
    dfffff = dffff.to_json(orient='table')
    temp = json.loads(dfffff)['data']
    dfffff = json.dumps(temp)
    return jsonify(dfffff)

app.run()
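
# A minimal sketch of the pivot step above, on made-up records (field names
# mirror the Mongo documents; the values are illustrative only):
#
#   toy = pd.DataFrame({
#       '_id': [1, 2, 3],
#       'iden': ['com.burbn.instagram', 'com.burbn.instagram', 'com.hammerandchisel.discord'],
#       'category': ['photos', 'camera', 'photos'],
#   })
#   pd.pivot_table(toy, index='iden', columns='category', aggfunc='count')['_id']
#   # category                     camera  photos
#   # iden
#   # com.burbn.instagram             1.0     1.0
#   # com.hammerandchisel.discord     NaN     1.0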

collection_data = client.data.data
r = collection_data.find_one({"accessor": {"identifier": "com.burbn.instagram", "identifierType": "bundleID"}})
#collection_data.find_one({"category":"photos"})
res = {'web': r['accessor']['identifier'], 'category': r['category'], 'time': r['timeStamp'], 'type': r['type']}
res

r['accessor']['identifier']
r['category']
r['timeStamp']

collection_data.find_one({"accessor": {"identifier": "com.burbn.instagram", "identifierType": "bundleID"}})

collection_data = client.data.data
ls = []
matches = list(collection_data.find({'category': 'photos'}).sort([('timeStamp', pymongo.DESCENDING)]))
for match in matches:
    res = {'web': match['accessor']['identifier'], 'category': match['category'], 'time': match['timeStamp'], 'type': match['type']}
    ls.append(res)
res = {'data': ls}
res

match['accessor']['identifier']

collection_data2 = client.data.data2
test = list(collection_data2.find({'category': 'photos'}).sort([('timeStamp', pymongo.DESCENDING)]))
test

import pandas as pd
from pandas import DataFrame
from pandas import json_normalize

dp = list(collection_data2.find().sort([('timeStamp', pymongo.DESCENDING)]))
df = json_normalize(dp)
df

df.rename(columns={'accessor.identifier': 'iden'}, inplace=True)

dff = pd.pivot_table(df, index='iden', columns='category', aggfunc='count')
dfff = dff['_id']
dffff = dfff
dffff = dffff.fillna(0, downcast='infer')
dffff['all'] = dffff['camera'] + dffff['kTCCServicePhotosAdd'] + dffff['location'] + dffff['mediaLibrary'] + dffff['microphone'] + dffff['photos']
dffff.sort_values(by=['all'], inplace=True, ascending=False)
dffff.to_json(orient='index')
dffff

dfffff = dffff.to_json(orient='table')
temp = json.loads(dfffff)['data']
dfffff = json.dumps(temp)
dfffff

df.keys()
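
# Background on the to_json(orient='table') step above: pandas wraps the frame
# as {"schema": ..., "data": [...]}, which is why only the "data" list is kept.
# An illustrative (not real) record shape:
#   {"iden": "com.burbn.instagram", "camera": 3, "photos": 5, ..., "all": 12}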

# Aggregation stage names need a leading '$' in MongoDB pipelines.
collection_data.aggregate([{'$sortByCount': "$tags"}])

dffff

import pandas as pd
import seaborn as sb
import matplotlib.pyplot as plt
import plotly.express as px

# Bar chart of access category over time, plus a density heat map of
# bundle identifier versus category.
dp = list(collection_data2.find().sort([('timeStamp', pymongo.DESCENDING)]))
df = json_normalize(dp)
fig = px.bar(df, x='timeStamp', y='category')
fig3 = px.density_heatmap(df, x='accessor.identifier', y='category', z='type', histfunc="avg", nbinsx=4000, nbinsy=500, color_continuous_scale="solar")
fig3.show()
fig.show()

df.keys()