Commit 13d30431 authored by Jacky Lin's avatar Jacky Lin
Browse files

Finish the first version

parent 91e305f9
......@@ -53,7 +53,8 @@ func indexHandler(w http.ResponseWriter, _ *http.Request) {
}
}
func searchHandler(newsapi *Client) http.HandlerFunc {
// searchHandler handle the search inquiries and send the search result back to web
func searchHandler(client *Client) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
u, err := url.Parse(r.URL.String())
if err != nil {
......@@ -68,7 +69,7 @@ func searchHandler(newsapi *Client) http.HandlerFunc {
page = "1"
}
results, err := newsapi.FetchEverything(searchQuery)
results, err := client.FetchEverything(searchQuery)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
......@@ -83,7 +84,7 @@ func searchHandler(newsapi *Client) http.HandlerFunc {
search := &SearchRet{
Query: searchQuery,
NextPage: nextPage,
TotalPages: int(math.Ceil(float64(results.TotalResults / newsapi.PageSize))),
TotalPages: int(math.Ceil(float64(results.TotalResults / client.PageSize))),
Results: results,
}
......@@ -118,13 +119,13 @@ func main() {
}
myClient := &http.Client{Timeout: 10 * time.Second}
newsapi := NewClient(myClient, 20)
client := NewClient(myClient, 20)
fs := http.FileServer(http.Dir("assets"))
mux := http.NewServeMux()
mux.Handle("/assets/", http.StripPrefix("/assets/", fs))
mux.HandleFunc("/search", searchHandler(newsapi))
mux.HandleFunc("/search", searchHandler(client))
mux.HandleFunc("/", indexHandler)
err = http.ListenAndServe(":"+port, mux)
if err != nil {
......
......@@ -6,7 +6,8 @@ import (
"strings"
)
func printContents(contents []string, start int, end int) (str string) {
// getContents used to concatenate all strings in contents to an output string
func getContents(contents []string, start int, end int) (str string) {
str = ""
for i := start; i < end; i++ {
str += contents[i]
......@@ -66,8 +67,3 @@ func reformatSearchKey(key string) string {
reformatKey = strings.Join(splitKey, "+")
return reformatKey
}
// splitParagraph splits paragraph into lines on "\n" and prints the
// resulting slice for inspection. It has no return value and exists
// purely as a debugging aid.
func splitParagraph(paragraph string) {
	lines := strings.Split(paragraph, "\n")
	fmt.Println(lines)
}
......@@ -4,24 +4,6 @@ import (
"net/http"
)
// Article is one retrieved article, shaped to match the News API
// article JSON schema. NOTE(review): this declaration appears on the
// deleted side of the diff (moved to another file in this commit).
type Article struct {
	Author      string `json:"author"`
	Title       string `json:"title"`
	Description string `json:"description"`
	URL         string `json:"url"`
	Content     string `json:"content"`
}
// Results is the set of articles returned for a query, with the total
// match count. NOTE(review): this declaration appears on the deleted
// side of the diff (moved to another file in this commit).
type Results struct {
	TotalResults int       `json:"totalResults"`
	Articles     []Article `json:"articles"`
}
// Client bundles the HTTP client used for requests with the page size
// used when paginating results (see TotalPages in searchHandler).
// NOTE(review): this declaration appears on the deleted side of the
// diff (moved to another file in this commit).
type Client struct {
	http     *http.Client
	PageSize int
}
func (c *Client) FetchEverything(query string) (*Results, error) {
description, title, contents, link := Search(query)
a := Article{
......@@ -31,12 +13,12 @@ func (c *Client) FetchEverything(query string) (*Results, error) {
URL: link,
Content: contents,
}
alist := []Article{a}
aList := []Article{a}
res := &Results{
TotalResults: 1,
Articles: alist,
Articles: aList,
}
return res, nil //json.Unmarshal(body, res)
return res, nil
}
func NewClient(httpClient *http.Client, pageSize int) *Client {
......
package main
type Result struct {
Title string `json:"title"`
URL string `json:"url"`
Content string `json:"content"`
import "net/http"
// Article represents the article found on wikipedia. The fields mirror
// the News API article JSON schema so the template/JSON layer can stay
// unchanged; values are filled from the scrape in FetchEverything.
type Article struct {
	Author      string `json:"author"`
	Title       string `json:"title"`
	Description string `json:"description"`
	URL         string `json:"url"`
	Content     string `json:"content"`
}
// Results stores the top ranked relevant articles from the search results,
// together with the total number of matches (used by the handler to
// compute the page count).
type Results struct {
	TotalResults int       `json:"totalResults"`
	Articles     []Article `json:"articles"`
}
// Client bundles the HTTP client used for outgoing requests with the
// page size used when paginating search results. Construct it with
// NewClient rather than using the zero value (the http field is
// unexported and would be nil).
type Client struct {
	http     *http.Client
	PageSize int
}
......@@ -98,13 +98,12 @@ func wikiIntroScrape(wikiLink string) (description string, title string, content
// Else append to our paragraphs to show later
paragraphs = append(paragraphs, selection.Text())
}
})
// Print the title
// Print the title, Check the info
fmt.Printf("Introduction of %s:\n\n", pageTitle)
fmt.Println(paragraphs[0])
// Print the content of paragraph
content := printContents(paragraphs, 0, len(paragraphs))
// Obtain the content of paragraph
content := getContents(paragraphs, 0, len(paragraphs))
return paragraphs[0], pageTitle, content
}
......@@ -119,7 +118,3 @@ func Search(key string) (description string, title string, contents string, wiki
description, title, contents = wikiIntroScrape(wikiLink)
return description, title, contents, wikiLink
}
// mainq is leftover debug code (note the stray 'q' in the name that
// keeps it from clashing with main): it scrapes a fixed Wikipedia page
// and discards the results. It is never called — NOTE(review): this
// function is removed on the other side of this diff; confirm and delete.
func mainq() {
	wikiIntroScrape("https://en.wikipedia.org/wiki/Iron_Man")
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment