about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--DSAA/chap3_lists_stacks_queues/ex_2.c64
-rw-r--r--DSAA/chap3_lists_stacks_queues/ex_3.c45
-rw-r--r--DSAA/chap3_lists_stacks_queues/ex_4&5.c136
-rw-r--r--DSAA/chap3_lists_stacks_queues/ex_6.c6
-rw-r--r--DSAA/chap3_lists_stacks_queues/list.c145
-rw-r--r--DSAA/chap3_lists_stacks_queues/list.h33
-rw-r--r--DSAA/chap3_lists_stacks_queues/polynomial_ADT.c (renamed from NjuSpider/njubbs/njubbs/__init__.py)0
-rw-r--r--DSAA/chap3_lists_stacks_queues/polynomial_ADT.h22
-rw-r--r--NjuSpider/njubbs/njubbs/__init__.pycbin107 -> 0 bytes
-rw-r--r--NjuSpider/njubbs/njubbs/__pycache__/__init__.cpython-34.pycbin149 -> 0 bytes
-rw-r--r--NjuSpider/njubbs/njubbs/__pycache__/items.cpython-34.pycbin610 -> 0 bytes
-rw-r--r--NjuSpider/njubbs/njubbs/__pycache__/settings.cpython-34.pycbin264 -> 0 bytes
-rw-r--r--NjuSpider/njubbs/njubbs/items.py21
-rw-r--r--NjuSpider/njubbs/njubbs/items.pycbin396 -> 0 bytes
-rw-r--r--NjuSpider/njubbs/njubbs/pipelines.py11
-rw-r--r--NjuSpider/njubbs/njubbs/settings.py91
-rw-r--r--NjuSpider/njubbs/njubbs/settings.pycbin297 -> 0 bytes
-rw-r--r--NjuSpider/njubbs/njubbs/spiders/221
-rw-r--r--NjuSpider/njubbs/njubbs/spiders/__init__.py4
-rw-r--r--NjuSpider/njubbs/njubbs/spiders/__init__.pycbin161 -> 0 bytes
-rw-r--r--NjuSpider/njubbs/njubbs/spiders/__pycache__/__init__.cpython-34.pycbin157 -> 0 bytes
-rw-r--r--NjuSpider/njubbs/njubbs/spiders/__pycache__/jwSpider.cpython-34.pycbin879 -> 0 bytes
-rw-r--r--NjuSpider/njubbs/njubbs/spiders/__pycache__/njubbsSpider.cpython-34.pycbin927 -> 0 bytes
-rw-r--r--NjuSpider/njubbs/njubbs/spiders/__pycache__/quoteSpider.cpython-34.pycbin862 -> 0 bytes
-rw-r--r--NjuSpider/njubbs/njubbs/spiders/jwSpider.py28
-rw-r--r--NjuSpider/njubbs/njubbs/spiders/njubbsSpider.py22
-rw-r--r--NjuSpider/njubbs/njubbs/spiders/quoteSpider.py14
-rw-r--r--NjuSpider/njubbs/scrapy.cfg11
-rwxr-xr-xNjuSpider/pythonbin3781768 -> 0 bytes
-rw-r--r--SICP/ch1_3.scm33
30 files changed, 484 insertions, 223 deletions
diff --git a/DSAA/chap3_lists_stacks_queues/ex_2.c b/DSAA/chap3_lists_stacks_queues/ex_2.c
new file mode 100644
index 0000000..103fef6
--- /dev/null
+++ b/DSAA/chap3_lists_stacks_queues/ex_2.c
@@ -0,0 +1,64 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "list.h"
+
+/* Return the Position nth links past the header L (so nth = 1 is the
+ * first real element), or NULL if the chain ends before then. */
+Position FindNth( List L, int nth )
+{
+ int i;
+ Position P;
+ P = L;
+
+ /* Advance nth times, stopping early at the end of the list. */
+ for( i = 0; i < nth && P != NULL; i++ )
+ {
+ P = P->Next;
+ }
+
+ return P;
+}
+
+/* Exercise: print the elements of L found at the (1-based) positions
+ * stored in list P.  Positions past the end of L are silently skipped.
+ * Both lists use header nodes.
+ * Cleanup: removed the unused local variable i. */
+void PrintLots( List L, List P )
+{
+    Position Posi;
+
+    for( P = P->Next; P != NULL; P = P->Next )
+    {
+        Posi = FindNth( L, P->Element );
+
+        if (Posi != NULL)
+        {
+            printf("%d ", Posi->Element);
+        }
+    }
+
+    printf("\n");
+}
+
+int main()
+{
+    List L, P;
+
+    /* Bug fix: malloc leaves the header's Next pointer indeterminate,
+     * and the first Insert reads it (undefined behavior).  calloc
+     * zero-initializes it; the result is also now checked. */
+    L = calloc( 1, sizeof( struct Node ) );
+    P = calloc( 1, sizeof( struct Node ) );
+    if( L == NULL || P == NULL )
+    {
+        fprintf( stderr, "Out of space!\n" );
+        return 1;
+    }
+
+    /* P holds the 1-based positions to print from L. */
+    Insert(6, P, P);
+    Insert(4, P, P);
+    Insert(3, P, P);
+    Insert(1, P, P);
+
+    Insert(8, L, L);
+    Insert(7, L, L);
+    Insert(6, L, L);
+    Insert(5, L, L);
+    Insert(4, L, L);
+    Insert(3, L, L);
+    Insert(2, L, L);
+    Insert(1, L, L);
+
+    PrintList( P );
+    PrintList( L );
+
+    PrintLots( L, P );
+
+    return 0;
+}
diff --git a/DSAA/chap3_lists_stacks_queues/ex_3.c b/DSAA/chap3_lists_stacks_queues/ex_3.c
new file mode 100644
index 0000000..6981ce9
--- /dev/null
+++ b/DSAA/chap3_lists_stacks_queues/ex_3.c
@@ -0,0 +1,45 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "list.h"
+
+/* Swap two *adjacent* nodes of a singly linked list by relinking
+ * pointers only (no element copying).
+ * Assumes FrontP immediately precedes BackP in L, and that element
+ * values are unique -- FindPrevious locates FrontP by its value. */
+void SwapSinglyAdjacentNode( Position FrontP, Position BackP , List L)
+{
+ Position TmpCell;
+
+ /* The node before FrontP now points at BackP... */
+ TmpCell = FindPrevious( FrontP->Element, L );
+ TmpCell->Next = BackP;
+
+ /* ...and the pair's internal links are reversed. */
+ TmpCell = FrontP;
+ FrontP->Next = BackP->Next;
+ BackP->Next = TmpCell;
+}
+
+/* TODO(review): doubly linked variant of the adjacent-node swap is
+ * not implemented yet (empty stub).  Name also has a typo:
+ * "Adjacnet" -> "Adjacent". */
+void SwapDoublyAdjacnetNode( Position P1, Position P2 )
+{
+
+}
+
+int main()
+{
+    List L;
+    Position Front, Back;
+
+    /* Bug fix: malloc leaves the header's Next pointer indeterminate
+     * and the first Insert reads it (undefined behavior).  calloc
+     * zero-initializes it; the result is also now checked. */
+    L = calloc( 1, sizeof( struct Node ) );
+    if( L == NULL )
+    {
+        fprintf( stderr, "Out of space!\n" );
+        return 1;
+    }
+
+    Insert(1, L, L);
+    Insert(2, L, L);
+    Insert(3, L, L);
+    Insert(4, L, L);
+
+    PrintList( L );
+
+    /* In the list 4->3->2->1, node 3 immediately precedes node 2,
+     * which is what SwapSinglyAdjacentNode requires. */
+    Front = Find(3, L);
+    Back = Find(2, L);
+
+    SwapSinglyAdjacentNode(Front, Back, L);
+
+    PrintList( L );
+
+    return 0;
+}
diff --git a/DSAA/chap3_lists_stacks_queues/ex_4&5.c b/DSAA/chap3_lists_stacks_queues/ex_4&5.c
new file mode 100644
index 0000000..40f2802
--- /dev/null
+++ b/DSAA/chap3_lists_stacks_queues/ex_4&5.c
@@ -0,0 +1,136 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "list.h"
+
+/* Append to InterList every element of L1 that also occurs in L2.
+ * Uses a linear Find per element, so O(n*m); duplicates in L1 are
+ * copied as-is.  All three lists use header nodes; TmpCell tracks
+ * the tail of InterList for O(1) appends. */
+void Intersection( List L1, List L2, List InterList )
+{
+ Position P, TmpCell;
+
+ TmpCell = InterList;
+
+ for( L1 = L1->Next; L1 != NULL; L1 = L1->Next )
+ {
+ P = Find( L1->Element, L2 );
+ if( P )
+ {
+ TmpCell = Insert( P->Element, InterList, TmpCell);
+ }
+ }
+}
+
+/* Union of two ascending sorted lists L1 and L2 into UniList
+ * (header nodes assumed; TmpCell tracks the tail for O(1) appends).
+ * Bug fix: after the equal-element branch advances both pointers,
+ * either may become NULL, yet the original inner loops dereferenced
+ * P1 (resp. P2) while only checking the other pointer -- a NULL
+ * dereference whenever one list ran out there.  Both inner loops now
+ * test both pointers.  Also removed the unused local P. */
+void Union( List L1, List L2, List UniList )
+{
+    Position TmpCell;
+    Position P1, P2;
+
+    TmpCell = UniList;
+    P1 = L1->Next;
+    P2 = L2->Next;
+
+    while( P1 && P2 )
+    {
+        if ( P1->Element == P2->Element )
+        {
+            TmpCell = Insert( P1->Element, UniList, TmpCell );
+            P1 = P1->Next;
+            P2 = P2->Next;
+        }
+
+        while( P1 && P2 && P1->Element > P2->Element )
+        {
+            TmpCell = Insert( P2->Element, UniList, TmpCell );
+            P2 = P2->Next;
+        }
+
+        while( P1 && P2 && P1->Element < P2->Element )
+        {
+            TmpCell = Insert( P1->Element, UniList, TmpCell );
+            P1 = P1->Next;
+        }
+    }
+
+    /* Copy whatever remains of the longer list. */
+    while( P1 )
+    {
+        TmpCell = Insert( P1->Element, UniList, TmpCell );
+        P1 = P1->Next;
+    }
+
+    while( P2 )
+    {
+        TmpCell = Insert( P2->Element, UniList, TmpCell );
+        P2 = P2->Next;
+    }
+}
+
+/* Merge-style union of two ascending sorted lists into UniList:
+ * a single pass that handles exhaustion of either list explicitly.
+ * Cleanup: removed the unused local P and replaced the
+ * if/continue chain with an equivalent else-if ladder. */
+void
+UnionSectionVersion( List L1, List L2, List UniList )
+{
+    Position TmpCell;          /* tail of UniList, for O(1) appends */
+
+    TmpCell = UniList;
+
+    L1 = L1->Next;
+    L2 = L2->Next;
+
+    while( L1 || L2 )
+    {
+        if( !L1 )                           /* L1 exhausted: copy L2 */
+        {
+            TmpCell = Insert( L2->Element, UniList, TmpCell );
+            L2 = L2->Next;
+        }
+        else if( !L2 )                      /* L2 exhausted: copy L1 */
+        {
+            TmpCell = Insert( L1->Element, UniList, TmpCell );
+            L1 = L1->Next;
+        }
+        else if( L1->Element == L2->Element )
+        {
+            TmpCell = Insert( L1->Element, UniList, TmpCell );
+            L1 = L1->Next;
+            L2 = L2->Next;
+        }
+        else if( L1->Element < L2->Element )
+        {
+            TmpCell = Insert( L1->Element, UniList, TmpCell );
+            L1 = L1->Next;
+        }
+        else                                /* L1->Element > L2->Element */
+        {
+            TmpCell = Insert( L2->Element, UniList, TmpCell );
+            L2 = L2->Next;
+        }
+    }
+}
+
+int main()
+{
+    int Arr1[10] = {7, 6, 5, 4, 3, 2, 1};
+    int Arr2[10] = {10, 8, 6, 4, 2};
+    List L1, L2, InterList, UniList;
+
+    /* Bug fix: malloc leaves each header's Next pointer indeterminate
+     * and the first Insert reads it (undefined behavior).  calloc
+     * zero-initializes it; the results are also now checked. */
+    L1 = calloc( 1, sizeof( struct Node ) );
+    L2 = calloc( 1, sizeof( struct Node ) );
+    InterList = calloc( 1, sizeof( struct Node ) );
+    UniList = calloc( 1, sizeof( struct Node ) );
+    if( !L1 || !L2 || !InterList || !UniList )
+    {
+        fprintf( stderr, "Out of space!\n" );
+        return 1;
+    }
+
+    /* Arrays are descending; ConstructList inserts at the head,
+     * so the resulting lists are ascending, as Union requires. */
+    ConstructList( L1, Arr1, 7 );
+    ConstructList( L2, Arr2, 5 );
+
+    Intersection( L1, L2, InterList );
+    UnionSectionVersion( L1, L2, UniList );
+    PrintList( L1 );
+    PrintList( L2 );
+    PrintList( InterList );
+    PrintList( UniList );
+
+    return 0;
+}
diff --git a/DSAA/chap3_lists_stacks_queues/ex_6.c b/DSAA/chap3_lists_stacks_queues/ex_6.c
new file mode 100644
index 0000000..2a8f565
--- /dev/null
+++ b/DSAA/chap3_lists_stacks_queues/ex_6.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "list.h"
+
+
diff --git a/DSAA/chap3_lists_stacks_queues/list.c b/DSAA/chap3_lists_stacks_queues/list.c
new file mode 100644
index 0000000..23848bc
--- /dev/null
+++ b/DSAA/chap3_lists_stacks_queues/list.c
@@ -0,0 +1,145 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "list.h"
+
+/* Return true if L is empty.
+ * L is a header node, so only L->Next is inspected. */
+
+int
+IsEmpty( List L )
+{
+ return L->Next == NULL;
+}
+
+/* Return true if P is the last position in list L.
+ * Parameter L is unused in this implementation; it is kept for
+ * interface symmetry with the other list operations. */
+
+int
+IsLast( Position P, List L )
+{
+ return P->Next == NULL;
+}
+
+/* Return Position of the first occurrence of X in L; NULL if not
+ * found.  Linear scan that skips the header node. */
+
+Position
+Find( ElementType X, List L )
+{
+ Position P;
+
+ P = L->Next;
+ while( P != NULL && P->Element != X )
+ P = P->Next;
+
+ return P;
+}
+
+/* Delete first occurrence of X from a list.
+ * Assume use of a header node.
+ * If X is absent, FindPrevious returns the last node, IsLast is
+ * true, and the call is a harmless no-op. */
+
+void Delete( ElementType X, List L )
+{
+ Position P, TmpCell;
+
+ P = FindPrevious(X, L);
+
+ if ( !IsLast(P, L) )
+ {
+ TmpCell = P->Next;
+ P->Next = TmpCell->Next;
+ free( TmpCell );
+ }
+}
+
+/* Return the node preceding the first occurrence of X.
+ * If X is not found, the Next field of the returned Position is
+ * NULL (i.e. the last node is returned).  Assumes a header node. */
+
+Position
+FindPrevious( ElementType X, List L )
+{
+ Position P;
+
+ P = L;
+ while( P->Next != NULL && P->Next->Element != X )
+ {
+ P = P->Next;
+ }
+
+ return P;
+}
+
+/* Insert X after legal position P (header implementation assumed;
+ * parameter L is unused).  Return the position of the inserted node.
+ * Bug fix: on malloc failure the original printed a message and then
+ * dereferenced the NULL pointer anyway; it now reports the error on
+ * stderr and exits instead. */
+
+Position
+Insert( ElementType X, List L, Position P )
+{
+    Position TmpCell;
+
+    TmpCell = malloc( sizeof( struct Node ) );
+    if( TmpCell == NULL )
+    {
+        fprintf( stderr, "Out of space!\n" );
+        exit( EXIT_FAILURE );
+    }
+
+    TmpCell->Element = X;
+    TmpCell->Next = P->Next;
+    P->Next = TmpCell;
+
+    return TmpCell;
+}
+
+/* Free every node of L except the header, which is reset to the
+ * empty list (L->Next == NULL) before the nodes are released. */
+void
+DeleteList( List L )
+{
+ Position P, Tmp;
+
+ P = L->Next;
+ L->Next = NULL;
+
+ while( P != NULL )
+ {
+ Tmp = P->Next;
+ free( P );
+ P = Tmp;
+ }
+}
+
+/* Build a list from Elements[0..Num-1].  Each element is inserted
+ * immediately after the header, so the list holds the array in
+ * REVERSE order (callers pass descending arrays to get ascending
+ * lists). */
+void ConstructList( List L, int Elements[], int Num )
+{
+ int i;
+
+ for( i = 0; i < Num; i++ )
+ {
+ Insert(Elements[i], L, L);
+ }
+}
+/* Print the list as "e1->e2->...->" followed by a newline.
+ * The header node is skipped. */
+void
+PrintList( List L )
+{
+ Position P;
+ P = L->Next;
+
+ while( P != NULL )
+ {
+ printf( "%d->", P->Element );
+ P = P->Next;
+ }
+
+ printf( "\n" );
+}
+
+/*int*/
+/*main()*/
+/*{*/
+ /*Position P;*/
+ /*List L;*/
+
+ /*L = malloc( sizeof (struct Node) );*/
+ /*Insert(3,L, L);*/
+ /*Insert(5,L, L);*/
+ /*Insert(8, L, L);*/
+ /*P = FindPrevious(3, L);*/
+ /*PrintList(L);*/
+/*}*/
diff --git a/DSAA/chap3_lists_stacks_queues/list.h b/DSAA/chap3_lists_stacks_queues/list.h
new file mode 100644
index 0000000..04ad7df
--- /dev/null
+++ b/DSAA/chap3_lists_stacks_queues/list.h
@@ -0,0 +1,33 @@
+#ifndef _LIST_H
+#define _LIST_H
+
+/* Singly linked list ADT with a header (dummy) node. */
+
+struct Node;
+typedef int ElementType;
+typedef struct Node *PtrToNode;
+typedef PtrToNode List;
+typedef PtrToNode Position;
+
+List MakeEmpty( List L );
+int IsEmpty( List L );
+int IsLast( Position P, List L );
+Position Find( ElementType X, List L );
+void Delete( ElementType X, List L );
+Position FindPrevious( ElementType X, List L );
+Position Insert( ElementType X, List L, Position P );
+void DeleteList( List L );
+Position Header( List L );
+Position First( List L );
+Position Advance( Position P );
+ElementType Retrieve( Position P );
+void PrintList( List L );
+void ConstructList( List L, int Elements[], int Num );
+
+/* Exposed here (rather than hidden in list.c) so the exercise
+ * programs can reach Element and Next directly.
+ * Bug fix: this definition previously sat AFTER the closing #endif,
+ * outside the include guard, so including list.h twice in one
+ * translation unit redefined struct Node.  It is now guarded. */
+struct Node
+{
+    ElementType Element;
+    Position Next;
+};
+
+#endif /* _LIST_H */
diff --git a/NjuSpider/njubbs/njubbs/__init__.py b/DSAA/chap3_lists_stacks_queues/polynomial_ADT.c
index e69de29..e69de29 100644
--- a/NjuSpider/njubbs/njubbs/__init__.py
+++ b/DSAA/chap3_lists_stacks_queues/polynomial_ADT.c
diff --git a/DSAA/chap3_lists_stacks_queues/polynomial_ADT.h b/DSAA/chap3_lists_stacks_queues/polynomial_ADT.h
new file mode 100644
index 0000000..f46b040
--- /dev/null
+++ b/DSAA/chap3_lists_stacks_queues/polynomial_ADT.h
@@ -0,0 +1,22 @@
+#ifndef _POLYNOMIAL_H
+#define _POLYNOMIAL_H
+
+/* Polynomial ADT as a singly linked list of terms, kept sorted by
+ * exponent. */
+
+typedef struct Node *PtrToNode;
+typedef PtrToNode Polynomial; /* Node sorted by exponent */
+
+void ZeroPolynomial( Polynomial Poly );
+void AddPolynomial( const Polynomial Poly1,
+                    const Polynomial Poly2,
+                    Polynomial PolySum );
+void MultPolynomial( const Polynomial Poly1,
+                     const Polynomial Poly2,
+                     Polynomial PolyProd );
+
+/* NOTE(review): the tag "Node" collides with struct Node in list.h
+ * if both headers are included in one translation unit -- consider
+ * renaming (e.g. PolyNode). */
+struct Node
+{
+    int Coefficient;
+    int Exponent;
+    PtrToNode Next;
+};
+
+/* Bug fix: the include guard was opened but never closed. */
+#endif /* _POLYNOMIAL_H */
diff --git a/NjuSpider/njubbs/njubbs/__init__.pyc b/NjuSpider/njubbs/njubbs/__init__.pyc
deleted file mode 100644
index 1b1455b..0000000
--- a/NjuSpider/njubbs/njubbs/__init__.pyc
+++ /dev/null
Binary files differ
diff --git a/NjuSpider/njubbs/njubbs/__pycache__/__init__.cpython-34.pyc b/NjuSpider/njubbs/njubbs/__pycache__/__init__.cpython-34.pyc
deleted file mode 100644
index c403ab5..0000000
--- a/NjuSpider/njubbs/njubbs/__pycache__/__init__.cpython-34.pyc
+++ /dev/null
Binary files differ
diff --git a/NjuSpider/njubbs/njubbs/__pycache__/items.cpython-34.pyc b/NjuSpider/njubbs/njubbs/__pycache__/items.cpython-34.pyc
deleted file mode 100644
index 64081e6..0000000
--- a/NjuSpider/njubbs/njubbs/__pycache__/items.cpython-34.pyc
+++ /dev/null
Binary files differ
diff --git a/NjuSpider/njubbs/njubbs/__pycache__/settings.cpython-34.pyc b/NjuSpider/njubbs/njubbs/__pycache__/settings.cpython-34.pyc
deleted file mode 100644
index ad47ee5..0000000
--- a/NjuSpider/njubbs/njubbs/__pycache__/settings.cpython-34.pyc
+++ /dev/null
Binary files differ
diff --git a/NjuSpider/njubbs/njubbs/items.py b/NjuSpider/njubbs/njubbs/items.py
deleted file mode 100644
index 1857063..0000000
--- a/NjuSpider/njubbs/njubbs/items.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Define here the models for your scraped items
-#
-# See documentation in:
-# http://doc.scrapy.org/en/latest/topics/items.html
-
-import scrapy
-
-
-class njubbsItem(scrapy.Item):
- title = scrapy.Field()
- time = scrapy.Field()
- author = scrapy.Field()
- # define the fields for your item here like:
- # name = scrapy.Field()
- pass
-
-class QuoteItem(scrapy.Item):
- text = scrapy.Field()
- author = scrapy.Field()
diff --git a/NjuSpider/njubbs/njubbs/items.pyc b/NjuSpider/njubbs/njubbs/items.pyc
deleted file mode 100644
index 425f698..0000000
--- a/NjuSpider/njubbs/njubbs/items.pyc
+++ /dev/null
Binary files differ
diff --git a/NjuSpider/njubbs/njubbs/pipelines.py b/NjuSpider/njubbs/njubbs/pipelines.py
deleted file mode 100644
index 69a7f7a..0000000
--- a/NjuSpider/njubbs/njubbs/pipelines.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Define your item pipelines here
-#
-# Don't forget to add your pipeline to the ITEM_PIPELINES setting
-# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
-
-
-class NjubbsPipeline(object):
- def process_item(self, item, spider):
- return item
diff --git a/NjuSpider/njubbs/njubbs/settings.py b/NjuSpider/njubbs/njubbs/settings.py
deleted file mode 100644
index cdec22d..0000000
--- a/NjuSpider/njubbs/njubbs/settings.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Scrapy settings for njubbs project
-#
-# For simplicity, this file contains only settings considered important or
-# commonly used. You can find more settings consulting the documentation:
-#
-# http://doc.scrapy.org/en/latest/topics/settings.html
-# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
-# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
-
-BOT_NAME = 'njubbs'
-
-SPIDER_MODULES = ['njubbs.spiders']
-NEWSPIDER_MODULE = 'njubbs.spiders'
-
-
-# Crawl responsibly by identifying yourself (and your website) on the user-agent
-#USER_AGENT = 'njubbs (+http://www.yourdomain.com)'
-
-# Obey robots.txt rules
-ROBOTSTXT_OBEY = False
-# ROBOTSTXT_OBEY = True
-
-# Configure maximum concurrent requests performed by Scrapy (default: 16)
-#CONCURRENT_REQUESTS = 32
-
-# Configure a delay for requests for the same website (default: 0)
-# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
-# See also autothrottle settings and docs
-#DOWNLOAD_DELAY = 3
-# The download delay setting will honor only one of:
-#CONCURRENT_REQUESTS_PER_DOMAIN = 16
-#CONCURRENT_REQUESTS_PER_IP = 16
-
-# Disable cookies (enabled by default)
-#COOKIES_ENABLED = False
-
-# Disable Telnet Console (enabled by default)
-#TELNETCONSOLE_ENABLED = False
-
-# Override the default request headers:
-#DEFAULT_REQUEST_HEADERS = {
-# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
-# 'Accept-Language': 'en',
-#}
-
-# Enable or disable spider middlewares
-# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
-#SPIDER_MIDDLEWARES = {
-# 'njubbs.middlewares.MyCustomSpiderMiddleware': 543,
-#}
-
-# Enable or disable downloader middlewares
-# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
-#DOWNLOADER_MIDDLEWARES = {
-# 'njubbs.middlewares.MyCustomDownloaderMiddleware': 543,
-#}
-
-# Enable or disable extensions
-# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
-#EXTENSIONS = {
-# 'scrapy.extensions.telnet.TelnetConsole': None,
-#}
-
-# Configure item pipelines
-# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
-#ITEM_PIPELINES = {
-# 'njubbs.pipelines.SomePipeline': 300,
-#}
-
-# Enable and configure the AutoThrottle extension (disabled by default)
-# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
-#AUTOTHROTTLE_ENABLED = True
-# The initial download delay
-#AUTOTHROTTLE_START_DELAY = 5
-# The maximum download delay to be set in case of high latencies
-#AUTOTHROTTLE_MAX_DELAY = 60
-# The average number of requests Scrapy should be sending in parallel to
-# each remote server
-#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
-# Enable showing throttling stats for every response received:
-#AUTOTHROTTLE_DEBUG = False
-
-# Enable and configure HTTP caching (disabled by default)
-# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
-#HTTPCACHE_ENABLED = True
-#HTTPCACHE_EXPIRATION_SECS = 0
-#HTTPCACHE_DIR = 'httpcache'
-#HTTPCACHE_IGNORE_HTTP_CODES = []
-#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
diff --git a/NjuSpider/njubbs/njubbs/settings.pyc b/NjuSpider/njubbs/njubbs/settings.pyc
deleted file mode 100644
index d3ab4e4..0000000
--- a/NjuSpider/njubbs/njubbs/settings.pyc
+++ /dev/null
Binary files differ
diff --git a/NjuSpider/njubbs/njubbs/spiders/2 b/NjuSpider/njubbs/njubbs/spiders/2
deleted file mode 100644
index 51833d2..0000000
--- a/NjuSpider/njubbs/njubbs/spiders/2
+++ /dev/null
@@ -1,21 +0,0 @@
-import json
-import scrapy
-from njubbs.items import njubbsItem
-
-
-class njubbsSpider(scrapy.Spider):
- name = "njubbs"
- start_urls = [
- 'http://bbs.nju.edu.cn/cache/t_act.js',
- ]
-
- def parse(self, response):
- strRe = "\"".join(response.text.split("\'"))
- print(strRe)
- strRe = strRe[5:-1]
- print(strRe)
- # print(response.text)
- for ncd in response.xpath('//div[@id=".p.ncd__act"]'):
- item = njubbsItem()
- yield ncd.xpath('td')
-
diff --git a/NjuSpider/njubbs/njubbs/spiders/__init__.py b/NjuSpider/njubbs/njubbs/spiders/__init__.py
deleted file mode 100644
index ebd689a..0000000
--- a/NjuSpider/njubbs/njubbs/spiders/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# This package will contain the spiders of your Scrapy project
-#
-# Please refer to the documentation for information on how to create and manage
-# your spiders.
diff --git a/NjuSpider/njubbs/njubbs/spiders/__init__.pyc b/NjuSpider/njubbs/njubbs/spiders/__init__.pyc
deleted file mode 100644
index 5aa3307..0000000
--- a/NjuSpider/njubbs/njubbs/spiders/__init__.pyc
+++ /dev/null
Binary files differ
diff --git a/NjuSpider/njubbs/njubbs/spiders/__pycache__/__init__.cpython-34.pyc b/NjuSpider/njubbs/njubbs/spiders/__pycache__/__init__.cpython-34.pyc
deleted file mode 100644
index 63868d8..0000000
--- a/NjuSpider/njubbs/njubbs/spiders/__pycache__/__init__.cpython-34.pyc
+++ /dev/null
Binary files differ
diff --git a/NjuSpider/njubbs/njubbs/spiders/__pycache__/jwSpider.cpython-34.pyc b/NjuSpider/njubbs/njubbs/spiders/__pycache__/jwSpider.cpython-34.pyc
deleted file mode 100644
index 4aa8868..0000000
--- a/NjuSpider/njubbs/njubbs/spiders/__pycache__/jwSpider.cpython-34.pyc
+++ /dev/null
Binary files differ
diff --git a/NjuSpider/njubbs/njubbs/spiders/__pycache__/njubbsSpider.cpython-34.pyc b/NjuSpider/njubbs/njubbs/spiders/__pycache__/njubbsSpider.cpython-34.pyc
deleted file mode 100644
index 70284f9..0000000
--- a/NjuSpider/njubbs/njubbs/spiders/__pycache__/njubbsSpider.cpython-34.pyc
+++ /dev/null
Binary files differ
diff --git a/NjuSpider/njubbs/njubbs/spiders/__pycache__/quoteSpider.cpython-34.pyc b/NjuSpider/njubbs/njubbs/spiders/__pycache__/quoteSpider.cpython-34.pyc
deleted file mode 100644
index 0fd02c9..0000000
--- a/NjuSpider/njubbs/njubbs/spiders/__pycache__/quoteSpider.cpython-34.pyc
+++ /dev/null
Binary files differ
diff --git a/NjuSpider/njubbs/njubbs/spiders/jwSpider.py b/NjuSpider/njubbs/njubbs/spiders/jwSpider.py
deleted file mode 100644
index 6bf39ba..0000000
--- a/NjuSpider/njubbs/njubbs/spiders/jwSpider.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import scrapy
-from njubbs.items import njubbsItem
-
-class njubbsSpider(scrapy.Spider):
- name = "njujw"
- start_urls = [
- 'http://jw.nju.edu.cn/',
- ]
-
- def parse(self, response):
- # print(response.text)
- news = response.xpath('//div[@class="conbox1"]/div[@class="con1"]/ul/li')
- print(news)
- for li in news:
- title = li.xpath('a/@title').extract_first()
- print('\n')
- url = li.xpath('a/@href').extract_first()
- print(title)
- print(response.urljoin(url))
- print("\n")
-
-
- def articleParse(self, response):
- title = response.xpath('//div[@class="div_detail"]/div[@class="div_title"]/center').extract_first()
-
- print(title)
- contengt = response.xpath('//div[@class="div_detail"]/dev[@class="div_contentDetail"]')
-
diff --git a/NjuSpider/njubbs/njubbs/spiders/njubbsSpider.py b/NjuSpider/njubbs/njubbs/spiders/njubbsSpider.py
deleted file mode 100644
index de761cd..0000000
--- a/NjuSpider/njubbs/njubbs/spiders/njubbsSpider.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import json
-import scrapy
-from njubbs.items import njubbsItem
-
-
-class njubbsSpider(scrapy.Spider):
- name = "njubbs"
- start_urls = [
- 'http://bbs.nju.edu.cn/cache/t_act.js',
- ]
-
- def parse(self, response):
- strRe = "\"".join(response.text.split("\'"))
- print(strRe)
- strRe = strRe[5:-24]
- jsonStr = json.JSONEncoder().encode(strRe)
- print(strRe)
- # print(response.text)
- for ncd in response.xpath('//div[@id=".p.ncd__act"]'):
- item = njubbsItem()
- yield ncd.xpath('td')
-
diff --git a/NjuSpider/njubbs/njubbs/spiders/quoteSpider.py b/NjuSpider/njubbs/njubbs/spiders/quoteSpider.py
deleted file mode 100644
index 66f86ea..0000000
--- a/NjuSpider/njubbs/njubbs/spiders/quoteSpider.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import scrapy
-from njubbs.items import QuoteItem
-class QuotesSpider(scrapy.Spider):
- name = "quotes"
- start_urls = [
- 'http://quotes.toscrape.com/page/1/',
- 'http://quotes.toscrape.com/page/2/',
- ]
-def parse(self, response):
- for quote in response.xpath('//div[@class="quote"]'):
- item = QuoteItem()
- item['text'] = quote.xpath('span[@class="text"]/text()').extract_first()
- item['author'] = quote.xpath('span/small/text()').extract_first()
- yield item
diff --git a/NjuSpider/njubbs/scrapy.cfg b/NjuSpider/njubbs/scrapy.cfg
deleted file mode 100644
index 0aba25d..0000000
--- a/NjuSpider/njubbs/scrapy.cfg
+++ /dev/null
@@ -1,11 +0,0 @@
-# Automatically created by: scrapy startproject
-#
-# For more information about the [deploy] section see:
-# https://scrapyd.readthedocs.org/en/latest/deploy.html
-
-[settings]
-default = njubbs.settings
-
-[deploy]
-#url = http://localhost:6800/
-project = njubbs
diff --git a/NjuSpider/python b/NjuSpider/python
deleted file mode 100755
index ef0c6ab..0000000
--- a/NjuSpider/python
+++ /dev/null
Binary files differ
diff --git a/SICP/ch1_3.scm b/SICP/ch1_3.scm
new file mode 100644
index 0000000..76fadb7
--- /dev/null
+++ b/SICP/ch1_3.scm
@@ -0,0 +1,33 @@
+(define (cube x) (* x x x))
+
+;; Sum from a to b.
+
+;; Higher-order summation (SICP 1.3.1): computes
+;; term(a) + term(next(a)) + ... for a <= b, by linear recursion.
+(define (sum term a next b)
+  (if (> a b )
+      0
+      (+ (term a)
+         (sum term (next a) next b))))
+
+(define (inc n) (+ n 1))
+(define (sum-cubes a b)
+  (sum cube a inc b))
+
+(sum-cubes 1 10)
+
+(define (identity x) x)
+(define (sum-integers a b)
+  (sum identity a inc b))
+(sum-integers 1 10)
+
+;; Leibniz-style series: sum converges (slowly) to pi/8,
+;; hence the factor 8 below.
+(define (pi-sum a b)
+  (define (pi-term x)
+    (/ 1.0 (* x (+ x 2))))
+  (define (pi-next x)
+    (+ x 4))
+(* 8 (sum pi-term a pi-next b)))
+(pi-sum 1 1000)
+
+;; Numerical integration via the midpoint rule.
+;; NOTE(review): "intergral" is a typo for "integral"; kept as-is
+;; because the call below uses the same spelling.
+(define (intergral f a b dx)
+  (define (add-dx x) (+ x dx))
+  (* (sum f (+ a (/ dx 2.0)) add-dx b) dx))
+(intergral cube 0 1 0.0001)