From 89e68f9df62cc2f2c46280bb1614bbb0a36fee8b Mon Sep 17 00:00:00 2001
From: Darrell Miller <darrell.miller@arcticwolf.com>
Date: Tue, 23 Jul 2024 20:19:54 -0500
Subject: [PATCH] Add files via upload

initial push of beta code
---
 config.example                                |   4 +
 .../jsonDataStore_lib.cpython-312.pyc         | Bin 0 -> 7556 bytes
 .../__pycache__/shodan_lib.cpython-312.pyc    | Bin 0 -> 3259 bytes
 library/jsonDataStore_lib.py                  | 141 +++++++++
 library/shodan_lib.py                         |  59 ++++
 requirements.txt                              |  21 ++
 shodanDataStore.json                          |   1 +
 shodanPull.py                                 | 276 ++++++++++++++++++
 shodanPull_v2.log                             |  11 +
 9 files changed, 513 insertions(+)
 create mode 100644 config.example
 create mode 100644 library/__pycache__/jsonDataStore_lib.cpython-312.pyc
 create mode 100644 library/__pycache__/shodan_lib.cpython-312.pyc
 create mode 100644 library/jsonDataStore_lib.py
 create mode 100644 library/shodan_lib.py
 create mode 100644 requirements.txt
 create mode 100644 shodanDataStore.json
 create mode 100644 shodanPull.py
 create mode 100644 shodanPull_v2.log

diff --git a/config.example b/config.example
new file mode 100644
index 0000000..f198ba0
--- /dev/null
+++ b/config.example
@@ -0,0 +1,4 @@
+shodan_api: '<put shodan API here>'
+shodan_query: <common shodan query>
+data_retention: 90 #days
+scan_interval: 30 #days
diff --git a/library/__pycache__/jsonDataStore_lib.cpython-312.pyc b/library/__pycache__/jsonDataStore_lib.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7bacdf9a846b2a509851ec1ec4f84db83c0a4a5b
GIT binary patch
literal 7556
zcmcgxUrZEPy07Z0YO3ji2E>1(G{wJx1_gCzapd1D(4el`Is)s$jCR_tLK~ZIwyF@j
zr*VClY$TbDlHCO7P9|e+ZZ=Mw2Xdcy)OBullKWD%K$@*}vnD$)^G49bMDN4>zEl0*
z)^YB`J%q1L)j8)o-}&?XzCZonWo4xll=J`i{p8<vQPjWiC0*E5<>@F?mMDpm=rA>5
zJ?RM=-fdyqv~9vR%}g-U>;y|=8x!WH?GyItl8KUOeuAHNOgL!DMjfRj_ESpY?%OP`
z6QwcQWB&nPd>+Q+rGO@Dp=sH4AeH2>7BG3N#-kD0!<y`HRFO?~AP~G_awA$S3@@Qb
zNDEr*EJ~+dp8BA&M9CCNIbjP?6O2Spu<*}Gwoj=EyTkx3L5U`Ki3948>_AJU5};0r
z2P#Mopk-1i(DInmBba4VYBb^vXn_$8Wubyh&fK_J`2O@yWa@-1LHRSyTBfH&e`C9!
z5|gGhF5N}5r)yJux|V%POdMXa;}X<VwlPzia%QTTip1oMqCuw2JTc0{`bIsp=?Ki?
zdQ@49*NLL|QS13G5mzaaWs0XSPRT({98_dDA<0u>+QZRHmt;k8z$7L&ql6-wf?Q4R
z=-HtmpUFw`#o0?{C7!5?oQFeUITDzb;h0%f4o{l=Wa^9Jf^jNt4ov*wT*sIyD{6-n
zP!u^Fo(_e>veMyIqB9qx*E)u0)lf7do>8JGkJ_PLj!J>Znb~mIfAvsDICN17D6x*b
zBl5%7_L-Q;`~7%T{Qmfky!Q6=_bL>l56BH_qxzkj>}Sr$yl`7v=l4G5%RbX?#TVkb
z`vb%Ei#5J~gBNZ&7o16cx54k$-|1cB`!;HuZ?fyW`zdgsg6T_$FL>F>t>}cx67`yM
z%iIneVwL-;=Lq#R?eo}72kd6*-m$^xOzdk~sfI2pP9`clfSST3S<Bqc_&fP4&28I@
z1GfW#)w;Iby1sBd>8dwe^{cK%oo`eC2B=_4OQA|pcp=Fm{s+FkZV~fvxu%LNBtd5{
zpd;tPCuoUIW5Lg<d7D;3<^obo0TLOdnP*@gf?4VQ)^n;X{~h0?%qW^lFjIKh3QBsL
zK`}CF2N(u_Y1we(2-QFxguR}#(bPCa!{2Y=+cjpKnxh*i4Q~%Ajg-;{N{w4fiiYPx
z$`o#DL6MjQ{RN=nm#-@K+pdqx?(H}gqjEsxor1~W6{I^g&%U*Eb|PI#g8hQ}l6?h!
z-c~|BmrJg+5;f0WXJfF__?)9Pyp^PYoJW6xoDL$P?f)!csWtvhvdyK=+T-+@v=XmH
zlxzc(1W1%ZkxL@#rZ}lYr$q#|_IPEw!{}w1d=ldoq7;=?F%s28`C3TT+T%5ZMW#n#
zvm1Hrhe?h{t_H#(N&Mx=X`dL15Pn@^+|e!$DN0m<>Yg|Bd$FxQA_7=P6)_qN&H}JX
zUE+&fWJHk`k4xDGhhSC)FU!Fzeq4^9$g7%cbViPtEW(P(hNA(AfVYB|U%|6e_K<-H
z_P(e#9G#6wL_nni8;OBn&JKA^7cRsr2cwcq+D*sMwV*ts0l0fO0&1p%%sUg%E}JY?
zO%`|9q-7QDC2WkCg1qf`0vXI3h7-W);|=-PSG1D|oYZlEPXNKH9m%Q|qpIbm1He7W
zyA9rbC$`3WHd&XW@B7N_w?lWgtyVT|QcPLfz3Rt1_bkO0V|T0n@Y~z$Mn(0KbJ3Zs
zs52_+^!mZI3hzd3!&kA-V_$r7oBd<?^AZ@d$y3$4Zk~AV$ktyzc8Ob*&GErwq4JN7
zOD&5ndhLPb-OEnB`-EO|(x^JQCY;(3DsG)vIFS@;453DM_pS?lo8?qR^|LY{Nmqm6
zYCs-ibPki=mC(vZ-#PTNBl?G<dd-+oHMS=FDxZ_sy>fh|`$0%Q^Wk@@?jA`Bqq;B(
zQdU&Id{#nL>_)DAIZO{NA6#zGYdVdp&Nbn10kij4DpwfY-IEl0b)gq5%FE3X82&;%
z4qN<{Fu+sy`|bVR%)^5Ys6XuH2Dp+xQ!Lc~%rQv$es-vvc~oA7RP1NGEzGwq0=#_t
zUhhyR^Ix4D)QdY?|M>=s^k!?Mti8Q4&e||yeKJlPtS?oH^GeXWp<N<0jW6>o^!=$E
zY{)0WfqM-Ho5bYxvnSYu{fZZ~k58o965t2-0IviGEwFcaEjH4^NbEJ+H8P{b#lflu
znP94ADr2Kl%SNeE)z|F+;aSwC7j;*O)%h~VZpWz5Yuu)UwUYG8Y^jxH--*;x39Ow*
zDI^YRERhH7c*6Gouo?$izueV?sQD6byi0v%>10SzHNPqYs5-)dOcg^ORSQhdn5Ed}
z4+4;CKcJfz-%F4W2(cVvGXRRRBo2Z59Rpj09$T7IFc69F%Iocu=h8hrb_=Q*@CnVD
zU^H@7R<sdK8HJfgaAqwEkSgP=1?=cIVX!qB_3#!zW3(3VUy);G8L4SeKk@X<Dttv8
z_J)Fp=-`r@+sI1EVzP^?!X+1c+nLA9pXsdZ2LV*{&(ce>9NzMPM8JPEN+Ol`-h9~4
zZ#|4Wu3C8SblvJ(=u0|l3}?-q%WKYNgzW2|Ur*K^G-?knx2)E7>w<gJPBk<q>%B(3
z_rd62E<C)TkA7?no!2i+8ADfA>%+QGyHVGatQ$1y1|N9;;(O@RM?NyVAM58Y8{W`r
z-IT?nxh?tbG2`81-__{nE*i&!tM5v>(12jupf~mW)z!0cuy5rP<KPLsqW*4=QE}jj
zt4?q1{;R8d<3R7q7307!v7y_j=y)Qy?<7`*efI)}&;ielro+jm9;2yewW;r>_m*$L
zcjtXWXw1O#UPIWc*LJTQTj|nkhS!Cou%edMoSsiw53e|lR`1PIclH@V^M<E$`4hu4
z2(N91@a~4EHC=0MOLh61u=`2#d&%Y_M)Q%?=I$I`1+yMr;g{oj&A^&4xUp^fg7?*^
zf`tPiQN_nlS)w$&;-pA)q1FJl;O#Cb#ZXkT6^;dMV|?*yNGL#h0em&$rjg4k5``kV
z<So+bVJy!DZ@+Q`h{tAKeg^Knit?nrkZpvyOs+j#f9o>V<zL1kUCsjwbsvx$6hxVN
zWz*f@-7($Ow#M(v#F)DbewV)I@Cvg$tMBez=liUE%<L76#yuqah^%t=vG}f~t;h(a
zV2l8vB?HkpK?J1+KIBD|1bohduck<9VMLkRb`b|w0)eHts&2ox)VJ8T>Z;fI`d7~#
zJwYPx4D8`+qe)1g6g%^ICNG8rYnG1FAckarB!(=SG)b_M?HlGkN(6;uu=1imEt6Ey
z1gj53sO9Ne_9?Lm2BJBxFq(Vo@pf1-Zec-5+gG$Buf#Z+$TK@=DieKXzkv-*$(BBS
zuz%Fl+m3z<$+CQ7uf)oq{d*=-&cwqI<8{t+*}akY1eb3qUn3tnI0Z#=6pkIosFJ(R
zk?0bhb95$x1ItU#{eaj5nJ;@NBFPagUZ2f*sk6bLtg4fcR*H#pN=Vb>h^R%y7j(Oo
z9mBZKH#>b%hJb2POzDQG2CiaY6>kCy_2bNxI+$OnM<L2A&BLHeX++Wl1md09>6vWc
zq~N_!hLI37A~9Dhz7kxr6I@d8;EAbJ+OURjptJ)~QR6JjY1z#(l-8R;sy+EgmDf0l
z<I60oSyp-L!or1{Y)-heX>&Sya|0y0tP27ph>0AFjysKCwSV4zuW|V|>lH_yGtl%s
ziFH~RTXnFQ#~*Zm7tm|Z7*%K1gby*;cyFm|u?u3T2i~N2%<zt_de7;Dzt+WZW83(e
zFp;q^BpMQj9)uoz1QCqxKD#E26g2ifXiN@_7y~1#17mvsuXOjhb>Y{~ogl*|1?1&3
z2%|`p(+d;p)y?<lyQ*H*wgyHu9dlXM^Y9A2tm-x0NukFOdR}gFHb?IZwF9>0--N+V
z>Q4=Wt;|EmfC%+Rt=!<xN*;BTV7=4f{W<gKgMHou%(n+PsOM$WG8p(Du(g2Mz|Jbg
zFt3=M%SI&XWES-bb86t@Vv@F?<YvVRb6r`D{1LQ!eeMdw{em)u*SnyAH=fFV<zh$C
z`7zO1P*S-wdm-8aYW%eMfkW>6g?YZu7A)-Yl=zfmAcKN70P{PR+mO~FNXLjlD?#2K
zuR;t!n8>IYrhfuqOhd%1?2p>+ahkR$BvnX~!)~Qm2mqKIqK;gs{29GB?c+lyPo6#p
z?g{QoR@c*MbfV)*1Ik&Oui3BUbmpLwx(wt7^}Va|*4)AzCWG&U)?6*QN^L8vy;uDN
zl)7}W^v)s3X>jo6nVl-Dg3JZc=0nRzmfKdw_1dFG)zLNKSVoB~RZTa$kFE>HED&g0
zY}4z1wtRfKTkrGf;%TGm^qO#{*f_oSl<xMe3#XsEVA@Rz$O{#X@;6n3cIJ_t8+4WA
zDHnn}aWD!U%VNrvRU}A3XWXhO<WW#^l?&1UkO21Bz_Dx^-`_O7zeyTxI@Z;6s;g<F
zy=fB81_dz7(Af;B(aESX9nef()fC8c!4Dy<<w*+g0mCd3b(sQ$KjbGcfAr!ML``H6
zM0nXiNJX&_$*nVfAKv0^1!w%GaPD_7Kt+=PZraY;<j(!Z&i(hot2?`I3eR||wmIqU
zFx(x>g5Ec}>K@biJx{VeoX_LJ-%;>)9KYWz_4}uz(kx^L{ifje|7JE2wpwz`8M(4M
zVK&8$1Tj<j1(HD|!$?jc!K<p^TE3=;w`eU6Pgp^M|7t&wPpOUaiW|o_cI?vG?HkUr
z8^c>X$F*!#(_GUQU(Ho-wbyeywocME?t`sTnrqnN%emgIdM9^=e(Ema8Z7R}&*M~1
z!-E)Ga#RZLtb%D7lMhD2Q6(@PAUR6~QyvP&p5*r~B9VfgdsfsJe2=vkeG2~KhnGmj
mHEl99P5+&8{GD?ClR9KjhyF?JTc!3rFCVbckheohnEWqLQBrjP

literal 0
HcmV?d00001

diff --git a/library/__pycache__/shodan_lib.cpython-312.pyc b/library/__pycache__/shodan_lib.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3be4105de158c206909eb6ae5096a32573f3c081
GIT binary patch
literal 3259
zcmbtWO>7fK6rTOF*U6d~5}g0UffV8VG!Ru#A{rXVPl3qLKxyN)S}ooU*sQ%_cHLlO
z+Nk0{q*f5MQmCq`IZ{!n98oU?+FLKXiczd>B&ga;ZxK1PN-v$+UGL_{l`3^4zkM_B
zXWq=b?>+w<3|0X&+Q0jB>bW0)-|(b7Xop#gA!Y%HKqO?4262!k;zXK^lLTHPWhzdg
zFD27yCeEaNabMaW_ovx7OMn0%!59$fn?PjlleWKjAV)@hf8a@!GFeg3BrTbi%mAjM
zBx{1nVm7a2q*#<T=}`qrCVgB;Trrt*T24ljWG1O4Y<D&#oXJ{0Vite|;)Dp|q)4FH
zh&U~haRz;T=;s%yn;_2OnBoDEL9{C8i~7xwdRY;L3@=P4`GhQ}stHCD)P_A6#N+we
zA+*^7D3ArfyJF4N6p3%i`%X9n(6E&?G+&uZv1QKtic}s=Ji7vcynD96vSw;Wne)B{
zkg^`pTftn}7xLrohcs(9YvEa8qps_zV&viN)*_9()@@(^5`0B&K2r`vnb`R#VFrY(
zhTc?_w0!MJ^pZ(IPUe%DOObQd6XZK0k;uE<?+-Zh$O$E#&SsJcK}#x`NKze$L^;zZ
zE0-=w5C+iJCNm9_84cnqHkq-Bk&&3WW5au1k#eTbo|`^<6SK;ZD;HB{AaPkrT)|K8
zrojg&CXu%cCr8G|&%BLtl~yHr3hqLiz#2qU{N^as^|O6%sS;HCL;*rcmeWaDmSEp7
zRHiQ~*ZM}YDh~TBR8mqxQ~T_F&t_$r|DeB5PF{op%sHz(n!Y@pGg+R;DbDlxMkh=+
zEnhB+a2zT`WEPa_nr_jbRIjky9qlRGvcd*G(QfDG@_Nfbqvl|d?Ju=--J*+Z^GZY8
z*TFA>i{kRWBgVcXOAQ0auI72$p06`sWbU0`YU{^pt+s>fbJz9u*B{j_4_z>ZF6ghn
zt+&2o)V%YQom@k^f`o%-%maM$7C5FXa0F0Nr7Xc_l5%0<t>1Up8)SimEy%81cRUr4
zLbYk@5Q%b%6Cj7c_@C{l4R-Wipt<m<1fxy>$8J!X3qZ{UU@EjVB7mw~f!dz<{}NjT
zD2hJY-ivS&)z}-<hg6QXzca)PxJFHa86xVB<(s!4W$5flgpY`oyyx@nSla91$dSg*
zoVPVb*Zm_9DsUi@Z>r$PkwvTEZgf&ybf*+6@l8gQE@m%5d;pV0UM)?C@56>5H8C=5
zG9wccXC|UFY{cu93qJyVR@PLLkW~E2XwwL6!eU%l@=e=v-Nj`KlB;(RnFXsP7icVn
zn--LLWjWksgu9l)J-5bwtqGSxbqm$=)yts{Bh;aH9xH~1O7+dpH?N*3hDJ*DEq|@l
z?7scc+($0}6=+=Hc7EQuuz!C4-I>L!i;27M8@sxSTz82J-99~cdYNlAxK=$f@bKWn
zy?WbVkvq0h+wk|QA6c%eII@oq4~4<^&4@e>*B(DWJ&wM1yqkK`%^<!qOzIGL7Jyc6
zTR?NcR4I)`qCiwkE4QbhAe@oH1#!y|N!#8_D+{1*t@qSZZlZf@1s7;H$choWknn6o
zsz8b~e)<0u_a-l1^aV1t)7z`el?%x^fIy9_qctk^CI{4&Zog88`!`Tmx_x})Eu&l%
zy!c&EJK){`YF{kBtBl1rvl7gqzKHPIxOL6xjYw2q7ERdTicBflj5rW68BNgy8MdKt
zqQMR5q)$u^#VjDhb|kZ*=~E>ECN4vKuO^*PrgIj&VF%XOgUMb*95kDpgVd`yLfDDL
zFCnrS)Fx+%d0RK%x((eM#ogHa86va5?moG>oZi~^fV=mh-aKg33>MjAC6>EgJy*TV
zHW+M!-u%jg_a4Ob#uG($c(p1J=yhvCj}h*<_uf)?;MUmh7P7nMyY!A%AJ#vrH;$at
zUp=L_pEh=#E^^~8!dnckMQ=a!VCLbBad=EW^oHJgvdEo!UfZ;AbpGgPgF4sYRSYuF
z`(G-?qW>PTsNPL4b3F#v^Y<!)?AFx=bjBZsdXM*lC%v`9e(Fc?+AvA|L^6nvMXPb^
z&j@LW=glgfPb*?p#yrRKSF?g_OYFZO_{Ly8Cb+iR@7atwoWigNk#7LR)yb}~)tJMC
zLq8RZZ-P?o?%7yrXYK5GX?K%O*Oh8^&Ymh&2WLlLkSr4?UepDd#<jK((`!4z=BORu
zCJu4^hWH<<(*_cXtUw_xSRDZ3ehqPBwf>)3`@#cw>ISsV9<@(52>X!$V^PH^wn`BM
h@eK4Ep#K*Td<J@!K+h`eBj`VO^bx%;03w!?e*hD3rV9W7

literal 0
HcmV?d00001

diff --git a/library/jsonDataStore_lib.py b/library/jsonDataStore_lib.py
new file mode 100644
index 0000000..ea4dcb5
--- /dev/null
+++ b/library/jsonDataStore_lib.py
@@ -0,0 +1,137 @@
+import os
+import json
+from datetime import datetime, timedelta, timezone
+from colorama import Fore, Back, Style, init
+
+##############################################################################
+# jsonDataStore Class
+# Description: VERY basic way of storing information in a json formatted text file.
+# Long term probably need to convert this to something else.. sql-lite? i dunno.
+##############################################################################
+class jsonDataStore:
+
+    # Creation Method: stores filename/logger and initializes per-instance
+    # state (class-level mutable attributes would be shared across instances)
+    def __init__(self, filename, logger):
+        self.dataStore = {}
+        self.datestoreFilename = filename
+        self.logger = logger
+        init(autoreset=True)
+        print (Fore.GREEN + f'   [*]: Data Store Object Created')
+        self.logger.debug("   [*]: Data Store Object Created")
+
+    # getDataStore Method: returns a shallow copy of the in-memory store
+    def getDataStore(self):
+        return self.dataStore.copy()
+
+    # readDataStoreFromFile: reads json datastore master file into memory.
+    # file_path defaults to the filename given at construction (the previous
+    # version ignored the argument and always used the stored filename).
+    def readDataStoreFromFile(self, file_path=None):
+        if file_path is None:
+            file_path = self.datestoreFilename
+        self.check_file_exists(file_path)
+
+        try:
+            # Open and read the JSON file
+            with open(file_path, 'r') as file:
+                data = json.load(file)
+                self.logger.debug("   [+]: reading data from file.")
+                self.dataStore = data.copy()
+            return data
+        except FileNotFoundError:
+            print(Fore.RED + f"   [+]: The file {file_path} does not exist.")
+            self.logger.debug("[+]: The file does not exist.")
+        except json.JSONDecodeError as e:
+            print(Fore.RED + f"   [-]: Invalid JSON in file: {file_path}. Error: {e}")
+            self.logger.debug(f"[-]: Invalid JSON in file: {file_path}. Error: {e}")
+        except Exception as e:
+            print(Fore.RED + f"An error occurred: {e}")
+            self.logger.debug(f"[-]: An error occurred: {e}")
+        return None
+
+    # addDataToStore: adds a new shodan record to the datastore, or updates
+    # the first_seen/last_seen/seen_count bookkeeping of an existing one.
+    def addDataToStore(self, data_key, data_to_store):
+        if self.dataStore.get(data_key): #already in DB
+            dataFromDictionary=self.dataStore.get(data_key)
+
+            # converts text timestamp to datetime stamp so you can compare
+            firstseen_timestamp = self.convertStrTimeStamptoDateTime(self.dataStore[data_key]['first_seen'])
+            lastseen_timestamp = self.convertStrTimeStamptoDateTime(self.dataStore[data_key]['last_seen'])
+
+            # converts text timestamp from new entry to datetime stamp so you can compare
+            data_to_store_timestamp = self.convertStrTimeStamptoDateTime(data_to_store['timestamp'])
+
+            # Because python reads files in a random order not alphabetically or by date, you gotta compare dates with
+            # each item read. (only applies to reading old data files)
+            if firstseen_timestamp > data_to_store_timestamp:
+                dataFromDictionary['first_seen']=data_to_store['timestamp']
+
+            if lastseen_timestamp < data_to_store_timestamp:
+                dataFromDictionary['last_seen']=data_to_store['timestamp']
+
+            dataFromDictionary['seen_count']+=1
+            self.dataStore[data_key]=dataFromDictionary.copy()
+            self.logger.info(f"        [+]: Updated Entry: {data_key}")
+
+        else: # new entry
+            data_to_store['first_seen'] = data_to_store['timestamp']
+            data_to_store['last_seen'] = data_to_store['timestamp']
+            data_to_store['last_scan'] = 0
+            data_to_store['seen_count'] = 1
+            # was "['vulnerability_count']: len(...)" -- a bare annotation that
+            # assigned nothing, and it read the nonexistent 'vulns' key
+            data_to_store['vulnerability_count'] = len(data_to_store.get('vuln_list', []))
+
+            self.dataStore[data_key] = data_to_store.copy()
+            self.logger.info(f"        [+]: New Entry: {data_key}")
+
+    # deleteFromDataStore: delete entry in data store by key. used for pruning
+    # old entries in the database
+    def deleteFromDataStore(self, key):
+        print (f'deleting from store {key}')
+        self.logger.info(f'   [+]: Deleting from store {key}')
+        # actually drop the record (previously this only logged)
+        self.dataStore.pop(key, None)
+
+    # countRecords: returns number of records in data store
+    def countRecords(self):
+        return len(self.dataStore)
+
+    # saveDataStore: saves all data in data store to a json file.
+    # filename defaults to the filename given at construction.
+    def saveDataStore(self, filename=None):
+        file_path = filename if filename is not None else self.datestoreFilename
+        try:
+            # Open and write to the JSON file
+            with open(file_path, 'w') as file:
+                json.dump(self.dataStore, file, indent=4)
+            print(Fore.GREEN + f"[*]: Data successfully written to {file_path}.")
+            print (Fore.GREEN + f'   [+]: Number of Records saved: {self.countRecords()}')
+            self.logger.debug(f"   [*]: Data successfully written to {file_path}.")
+            self.logger.debug(f'   [+]: Number of Records saved: {self.countRecords()}')
+
+        except Exception as e:
+            print(f"An error occurred: {e}")
+            self.logger.info(f"[-]: An error occurred: {e}")
+
+    # check_file_exists: just checks if the data store file exists, if not creates one
+    def check_file_exists(self, file_path):
+        # Check if the file exists
+        if not os.path.exists(file_path):
+            # Seed with an empty JSON object so a later json.load() succeeds
+            # (an empty file is not valid JSON)
+            with open(file_path, 'w') as file:
+                file.write('{}')
+            print(Fore.GREEN + f"  [+]: DataStore {file_path} created.")
+            self.logger.debug(f"   [+]: DataStore {file_path} created.")
+        else:
+            print(Fore.YELLOW + f"  [+]: DataStore {file_path} already exists.")
+            self.logger.debug(f"   [+]: DataStore {file_path} already exists.")
+
+    # convertStrTimeStamptoDateTime: datastore saves dates as text; convert
+    # back to datetime. fromisoformat accepts timestamps with or without
+    # fractional seconds (the old strptime pinned to '%f' crashed without them).
+    def convertStrTimeStamptoDateTime(self, strTimeStamp):
+        return datetime.fromisoformat(strTimeStamp)
diff --git a/library/shodan_lib.py b/library/shodan_lib.py
new file mode 100644
index 0000000..713ee65
--- /dev/null
+++ b/library/shodan_lib.py
@@ -0,0 +1,62 @@
+from datetime import datetime, timedelta, timezone
+import shodan
+from colorama import Fore, Back, Style, init
+
+'''
+shodan_api_class: just a wrapper for shodan api
+
+__init__ : takes in the shodan api and verifies communication
+check_api : verifies communication with shodan
+query_shodan : queries shodan with given query
+'''
+class shodan_api_class:
+
+    def __init__(self, shodan_api_key, logger):
+        # per-instance state (class-level attributes would be shared)
+        self.logger=logger
+        init(autoreset=True)
+        print (Fore.GREEN + "Initializing Shodan")
+        self.shodan_api_key=shodan_api_key
+        self.shodan_obj=shodan.Shodan(self.shodan_api_key)
+
+        self.shodan_valid_key=self.check_api(self)
+        print (Fore.YELLOW + f'   [+]: Shodan Communication is: {self.shodan_valid_key}')
+
+    # check_api: verifies communication with shodan; returns True/False.
+    # shodan_obj parameter is unused (kept for caller compatibility).
+    def check_api(self,shodan_obj):
+        try:
+            results = self.shodan_obj.info()
+
+            if results:
+                self.logger.debug(f"   [+]: Shodan API is valid")
+                return True
+            else:
+                self.logger.debug(f"   [-]: Shodan API is NOT valid")
+                return False
+        except shodan.APIError as e:
+            print(Fore.RED + f"Error: {e}")
+            # Logger has no ERROR attribute; the method is lowercase error()
+            self.logger.error(f"   [+]: Shodan API Error: {e}")
+            return False
+
+    # query_shodan: runs the search; returns a copy of the results dict, or
+    # None when the API call fails.
+    def query_shodan(self, shodan_query):
+        print (Fore.CYAN + f'   [+]: Querying: {shodan_query}')
+        self.logger.info(f'   [+]: Querying Shodan: {shodan_query}')
+
+        # Perform the search query
+        try:
+            results = self.shodan_obj.search(shodan_query)
+            # Print the results
+            print(Fore.GREEN + f"   [+]: Results found: {results['total']}")
+            # double-quoted f-string: same-quote nesting inside an f-string
+            # is a SyntaxError before Python 3.12
+            self.logger.info(f"   [+]: Results found: {results['total']}")
+
+            return results.copy()
+        except shodan.APIError as e:
+            print(Fore.RED + f"Error: {e}")
+            self.logger.error(f"Error: {e}")
+            return None
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..3f2764c
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,21 @@
+art==6.2
+certifi==2024.6.2
+charset-normalizer==3.3.2
+click==8.1.7
+click-plugins==1.1.1
+colorama==0.4.6
+filelock==3.15.1
+fire==0.6.0
+idna==3.7
+pyfiglet==1.0.2
+PyYAML==6.0.1
+requests==2.32.3
+requests-file==2.1.0
+shodan==1.31.0
+six==1.16.0
+tabulate==0.9.0
+termcolor==2.4.0
+text2art==0.2.0
+tldextract==5.1.2
+urllib3==2.2.1
+XlsxWriter==3.2.0
diff --git a/shodanDataStore.json b/shodanDataStore.json
new file mode 100644
index 0000000..9e26dfe
--- /dev/null
+++ b/shodanDataStore.json
@@ -0,0 +1 @@
+{}
\ No newline at end of file
diff --git a/shodanPull.py b/shodanPull.py
new file mode 100644
index 0000000..d26679c
--- /dev/null
+++ b/shodanPull.py
@@ -0,0 +1,276 @@
+#this is rough.. but works
+import argparse
+# import the module only: "from datetime import datetime" was immediately
+# shadowed by "import datetime", so all code uses datetime.datetime
+import datetime
+from datetime import timedelta, timezone
+import json
+import yaml
+import os
+import shutil
+import logging
+from library import shodan_lib
+from library import jsonDataStore_lib
+from tabulate import tabulate
+from colorama import Fore, Back, Style, init
+from art import text2art
+
+from library.shodan_lib import shodan_api_class
+# --===============================================================--
+#                               Gather
+# --===============================================================--
+# gather: query shodan for recent results and feed them into the data store
+def gather(config, dataStore, logger):
+    shodan_obj=shodan_lib.shodan_api_class(shodan_api_key=config['shodan_api'], logger=logger)
+
+    # Get the current UTC date and time
+    now_utc = datetime.datetime.now(timezone.utc)
+
+    # Look back 3 days (shodan indexing can lag behind real time)
+    lookback_utc = now_utc - timedelta(days=3)
+
+    # Format the lookback UTC date as a string
+    lookback_utc_str = lookback_utc.strftime("%Y-%m-%d")
+
+    print (lookback_utc_str)
+
+    # NOTE(review): config['shodan_query'] exists but is unused; the query is hard-coded here
+    # Define the search query with the timestamp and geographical filters
+    query = f'after:{lookback_utc_str} country:US state:LA'
+
+    results=shodan_obj.query_shodan(query)
+    if results: # query_shodan returns None when the API call fails
+        ProcessShodanResults(results['matches'], dataStore, logger)
+
+# --===============================================================--
+#                               Hunt
+# --===============================================================--
+# hunt: walk stored records, flagging old records for pruning and records
+# that are due for a re-scan (both actions are still TODO stubs)
+def hunt(config, dataStore):
+    print (Fore.GREEN + f'[*]: Hunting through Shodan Data')
+    print (Fore.GREEN + f'  [+]: Hunter has {dataStore.countRecords()} of records to go through')
+
+    data=dataStore.getDataStore()
+
+    #Pruning Data Store (removing old/dead records)
+    print(Fore.GREEN + f"  [+]: Pruning records that are over {config['data_retention']} days old.")
+    for record in data:
+        # NOTE(review): this is last_seen - first_seen (record lifespan), not
+        # how long ago the record was last seen -- confirm intended semantics
+        firstLastDelta = (dataStore.convertStrTimeStamptoDateTime(data[record]['last_seen']) - dataStore.convertStrTimeStamptoDateTime(data[record]['first_seen']))
+
+        if firstLastDelta.days > config['data_retention']:
+            #TODO: pop dead records
+            print (Fore.RED + "     Kill It  [-]: record ",":", firstLastDelta.days)
+
+        # datetime is the module here (datetime.datetime.now, not datetime.now),
+        # and the age must be compared in days (timedelta vs int raises TypeError)
+        if (data[record]['last_scan'] == 0) or ((datetime.datetime.now() - dataStore.convertStrTimeStamptoDateTime(data[record]['last_scan'])).days > config['scan_interval']):
+            #TODO: scan here
+            #TODO: put current date in last_scan
+            print (f'Scan here')
+
+# --===============================================================--
+#                                Show
+# --===============================================================--
+# show: print a table of recently-seen records; dumps to a file when large
+def show(config, dataStore):
+    print(Fore.GREEN + f'[*]: Displaying Shodan Data')
+    print(Fore.GREEN + f'  [+]: Displaying {dataStore.countRecords()} records')
+
+    data = dataStore.getDataStore()
+
+    headers = ['IP', 'First Seen Date', 'Last Seen Date', 'Last Seen -> Today', 'Last Scan Date', 'Vulnerability Count','Seen Count','Location']
+    tableData = []
+    table_row_count=0
+
+    # timestamped output filename, spaces replaced for shell friendliness
+    date_str=str(datetime.datetime.now())
+    file_str="table_"+date_str+".txt"
+    file_str=file_str.replace(" ","_")
+
+    for record in data:
+
+        dateDiff = datetime.datetime.now() - dataStore.convertStrTimeStamptoDateTime(data[record]['last_seen'])
+        vulnCount = len(data[record]['vuln_list'])
+
+        # only display records seen in the last 15 days
+        if dateDiff.days < 15:
+            location_str=data[record]['location']['city']+","+data[record]['location']['region_code']
+
+            row = [data[record]['ip_str'], data[record]['first_seen'],
+                   data[record]['last_seen'], dateDiff, data[record]['last_scan'],
+                   vulnCount, data[record]['seen_count'],location_str]
+            tableData.append(row)
+            table_row_count+=1
+
+    print(tabulate(tableData, headers, tablefmt="pretty"))
+
+    if table_row_count>50:
+        print (f'   [+]: large amount of records, saving output to table.txt')
+        print (f'   [+]: records saved: {table_row_count}')
+        finalTable=tabulate(tableData, headers, tablefmt="pretty")
+        with open(file_str, "w") as file:
+            file.write(finalTable)
+
+
+# --===============================================================--
+#                    Process Shodan Results
+# --===============================================================--
+# ProcessShodanResults: keep only results that carry vulnerabilities and
+# push the interesting fields into the data store. logger is optional so
+# the function no longer depends on a global defined in __main__.
+def ProcessShodanResults(results, dataStore, logger=None):
+    if logger is None:
+        logger = logging.getLogger(__name__)
+
+    #pulling out important fields and throwing out the crap
+    for result in results:
+        dictEntry={} # fresh dict per result so fields never leak between records
+        if result.get('vulns'): #if there is a vulnerability add it to list
+            if result.get('ip_str'):
+                dictEntry['timestamp'] = result.get('timestamp')
+                dictEntry['ip_str']=result.get('ip_str')
+                dictEntry['port'] = result.get('port')
+                dictEntry['version'] = result.get('version')
+                dictEntry['location'] = result.get('location')
+                dictEntry['ip'] = result.get('ip')
+                dictEntry['product'] = result.get('product')
+                dictEntry['hostnames'] = result.get('hostnames')
+                dictEntry['org'] = result.get('org')
+                dictEntry['isp'] = result.get('isp')
+                dictEntry['os'] = result.get('os')
+                dictEntry['vuln_list'] = list(set(result['vulns'].keys()))
+
+                # double-quoted f-strings: same-quote nesting inside an
+                # f-string is a SyntaxError before Python 3.12
+                print (Fore.GREEN + f"     [+ Had vulnerability +]: : {result['ip_str']} to dataStore")
+                logger.info(f"     [+ Had vulnerability +]: : {result['ip_str']} added to dataStore")
+                dataStore.addDataToStore(data_key=dictEntry['ip_str'], data_to_store=dictEntry.copy())
+        else:
+            print(Fore.YELLOW + f"     [-  No Vulnerability -]: : {result.get('ip_str')} not added")
+            logger.info(f"     [-  No Vulnerability -]: : {result.get('ip_str')} not added to data store")
+
+# --===============================================================--
+#                    Read JSON Files from folder
+# --===============================================================--
+# list_json_files: return the *.json filenames found in folder_path
+def list_json_files(folder_path):
+    # Get a list of all files in the specified folder
+    files = os.listdir(folder_path)
+
+    # Filter the list to include only JSON files
+    json_files = [f for f in files if f.endswith('.json')]
+
+    return json_files
+
+# read_json_file: parse a file of newline-delimited JSON objects
+def read_json_file(file_path):
+    json_objects = []
+    try:
+        # Open the text file
+        with open(file_path, 'r') as file:
+            # Read each line in the file
+            for line in file:
+                # Strip any extra whitespace and parse the JSON object
+                json_object = json.loads(line.strip())
+                json_objects.append(json_object)
+
+    except FileNotFoundError:
+        print(Fore.GREEN + f"The file {file_path} does not exist.")
+    except json.JSONDecodeError as e:
+        print(Fore.GREEN + f"Invalid JSON on line: {line.strip()}. Error: {e}")
+    except Exception as e:
+        print(Fore.GREEN + f"An error occurred: {e}")
+
+    return json_objects
+
+# processShodanJSONFiles: replay previously-captured shodan JSON files
+# through the normal processing path. NOTE(review): relies on the global
+# dataStore created in __main__.
+def processShodanJSONFiles(folderToProcess):
+    fileWithPath=[]
+
+    print (Fore.GREEN + f' [+]: folder being processed: {folderToProcess}')
+    jsonFileList = list_json_files(folderToProcess)
+
+    # adding folder to filename for processing
+    folderToProcess=folderToProcess+'/'
+    for item in jsonFileList:
+        item=folderToProcess+item
+        fileWithPath.append(item)
+
+    for fileItem in fileWithPath:
+        print (Fore.GREEN + f'  [+]: Processing: {fileItem}')
+        jsonFile=read_json_file(fileItem)
+
+        ProcessShodanResults(jsonFile, dataStore)
+
+# --===============================================================--
+#                            Load Config
+# --===============================================================--
+# load_config: parse the YAML config file and return it as a dict
+def load_config(file_path):
+    with open(file_path, 'r') as file:
+        config = yaml.safe_load(file)
+    return config
+
+# check_config_exists: make sure config.yml exists; seed it from
+# config.example on first run and tell the user to edit it
+def check_config_exists():
+    source_file = 'config.example'
+    target_file = 'config.yml'
+
+    if not os.path.exists(target_file):
+        if os.path.exists(source_file):
+            shutil.copyfile(source_file, target_file)
+            print(Fore.GREEN + f"Copied {source_file} to {target_file}.")
+            print (Fore.GREEN + f"Error: {target_file} Configuration file did not exist.")
+            print (Fore.GREEN + f"{target_file} was created, please edit with text editor for your configuration")
+            return False
+        else:
+            print(Fore.GREEN + f"Source file {source_file} does not exist.")
+            return False
+    else:
+        print(Fore.YELLOW + f"  [+]: {target_file} already exists.")
+        return True
+
+# --===============================================================--
+#                               Main
+# --===============================================================--
+
+if __name__ == "__main__":
+    init()
+
+    # Configure logging settings
+    logging.basicConfig(
+        level=logging.INFO,  # INFO and above go to the log file
+        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',  # Log message format
+        filename='shodanPull_v2.log',  # Log file name
+        filemode='a'  # Append mode
+    )
+
+    logger = logging.getLogger('shodanPull_v2 Logger')
+    logger.info(f'--===========================================--')
+
+    config_exists=check_config_exists()
+
+    if config_exists:
+        print(Fore.BLUE + f"--===============================================================--")
+        print(Fore.BLUE + f"--==                       qShodan                             ==--")
+        print(Fore.BLUE + f"--===============================================================--")
+
+        dataStore = jsonDataStore_lib.jsonDataStore('./shodanDataStore.json', logger)
+        dataStore.readDataStoreFromFile('./shodanDataStore.json')
+
+        config = load_config('config.yml')
+
+        gather(config, dataStore, logger)
+
+        #show(config, dataStore)
+
+        #TODO: wire up argparse gather/hunt/show sub-commands here
+
+        dataStore.saveDataStore('./shodanDataStore.json')
+        logger.info(f'--===========================================--')
diff --git a/shodanPull_v2.log b/shodanPull_v2.log
new file mode 100644
index 0000000..ae3246a
--- /dev/null
+++ b/shodanPull_v2.log
@@ -0,0 +1,11 @@
+2024-07-23 14:52:44,978 - shodanPull_v2 Logger - INFO - --===========================================--
+2024-07-23 14:53:41,688 - shodanPull_v2 Logger - INFO - --===========================================--
+2024-07-23 14:53:41,957 - shodanPull_v2 Logger - INFO -    [+]: Querying Shodan: after:2024-07-20 country:US state:LA
+2024-07-23 14:53:46,669 - shodanPull_v2 Logger - INFO -    [+]: Results found: 1
+2024-07-23 14:53:46,671 - shodanPull_v2 Logger - INFO -      [-  No Vulnerability -]: : 199.19.233.115 not added to data store
+2024-07-23 14:53:46,672 - shodanPull_v2 Logger - INFO - --===========================================--
+2024-07-23 14:54:41,166 - shodanPull_v2 Logger - INFO - --===========================================--
+2024-07-23 14:54:42,010 - shodanPull_v2 Logger - INFO -    [+]: Querying Shodan: after:2024-07-20 country:US state:LA
+2024-07-23 14:54:43,146 - shodanPull_v2 Logger - INFO -    [+]: Results found: 1
+2024-07-23 14:54:43,147 - shodanPull_v2 Logger - INFO -      [-  No Vulnerability -]: : 199.19.233.115 not added to data store
+2024-07-23 14:54:43,148 - shodanPull_v2 Logger - INFO - --===========================================--