init: initial commit
main/main.go | 92 (new file)
@@ -0,0 +1,92 @@
package main

import (
    "fmt"
    "git.dvdrw.dev/nsmarter/scraper/scraper"
    "log"
    "os"
    "strconv"
    "strings"
)

var Log = log.Default()
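
// Fallback values used when CHUNK_SIZE or LIMIT_QUERY_SIZE cannot be parsed.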
const defaultChunkSize = 5
const defaultLimitQuerySize = 0
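
// Scraper configuration, populated from environment variables by parseEnvars.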
var stationDbPath string
var innerCityOnly bool = false
var chunkSize = 5
var apiEndpoint string = "https://online.nsmart.rs/publicapi/v1/announcement/announcement.php"
var apiKey string
var limitQuerySize = 0
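
// parseEnvars walks the process environment and applies any recognised
// configuration variables, falling back to the defaults above when a
// numeric value fails to parse.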
func parseEnvars() {
    for _, e := range os.Environ() {
        pair := strings.SplitN(e, "=", 2)
        switch pair[0] {
        case "STATIONS_DB_PATH":
            stationDbPath = pair[1]
        case "INNER_CITY_ONLY":
            innerCityOnly = pair[1] == "1"
        case "CHUNK_SIZE":
            var err error
            chunkSize, err = strconv.Atoi(pair[1])
            if err != nil {
                Log.Printf("WARN: Invalid value for CHUNK_SIZE. Falling back to default value (%v)\n", defaultChunkSize)
                chunkSize = defaultChunkSize
            }
        case "LIMIT_QUERY_SIZE":
            var err error
            limitQuerySize, err = strconv.Atoi(pair[1])
            if err != nil {
                Log.Printf("WARN: Invalid value for LIMIT_QUERY_SIZE. Falling back to default value (%v)\n", defaultLimitQuerySize)
                limitQuerySize = defaultLimitQuerySize
            }
        case "API_ENDPOINT":
            apiEndpoint = pair[1]
        case "API_KEY":
            apiKey = pair[1]
        }
    }
}

func main() {
    // Parse out config from environment variables
    parseEnvars()

    if stationDbPath == "" {
        log.Fatal("Environment variable STATIONS_DB_PATH empty!")
    }

    stations, err := readDb()
    if err != nil {
        log.Fatal(err)
    }

    Log.Printf("Finished importing stations! (Total: %v)", len(stations))

    // Split stations so as to be scraped in chunks
    var stationChunks [][]scraper.Station
    for i := 0; i < len(stations); i += chunkSize {
        end := i + chunkSize

        if end > len(stations) {
            end = len(stations)
        }

        stationChunks = append(stationChunks, stations[i:end])
    }
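
    // Scrape each chunk concurrently, collecting results over a buffered channel.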
    results := make(chan []scraper.ScrapeResult, 200)
    for _, chunk := range stationChunks {
        go scraper.ScheduleScrape(chunk,
            results,
            scraper.ApiConfig{Endpoint: apiEndpoint,
                Key: apiKey})
    }
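
    // Print scrape results as they arrive; the channel is never closed in this
    // file, so this loop runs for the life of the process.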
    for r := range results {
        fmt.Printf("Received data: %#v\n", r)
    }
}