I can't return a string slice like I would like to - only the last string gets passed

I wrote this code to get the list of files in a directory, append the names to a slice, and open them one by one. After I open a file I search it for some words and, if they are found, write them to a new file.
But I always get the same words in the new files and I can't figure out why:
package main
import (
"bufio"
"fmt"
"io/ioutil"
"log"
"os"
"strings"
"time"
)
const dir_to_read_path string = "path"
func main() {
start := time.Now()
temp_string_filename := ""
temp_string_filename_counter := 0
//defer list_file()
// just pass the file name
for k := range list_file() {
temp_string_filename = list_file()[temp_string_filename_counter]
if true {
k = k
}
temp_string_filename_counter++
b, err := ioutil.ReadFile(temp_string_filename)
if err != nil {
fmt.Print(err)
}
// convert content to a 'string'
str := string(b)
control_params := []string{"numpy", "grabscreen", "cv2", "time", "os", "pandas", "tqdm", "collections", "models", "random", "inception_v3", "googlenet", "shuffle", "getkeys", "tflearn", "directkeys", "statistics", "motion", "tflearn.layers.conv", "conv_2d", "max_pool_2d", "avg_pool_2d", "conv_3d", "max_pool_3d", "avg_pool_3d"}
temp_string_filename = dir_to_read_path + "output_" + temp_string_filename
fmt.Println("Writing file n. ", k)
file, err := os.Create(temp_string_filename)
if err != nil {
log.Fatal("Cannot create file", err)
}
for _, z := range isValueInList(control_params, str, list_file()) {
fmt.Fprintf(file, z)
fmt.Fprintf(file, "\n")
}
defer file.Close()
elapsed := time.Since(start)
log.Printf("Execution took %s", elapsed)
}
}
func isValueInList(list []string, file_string string, read_file []string) []string {
encountered_modules := make([]string, 0, 10)
temp_string_filename := ""
temp_string_filename_counter := 0
encountered := map[string]bool{}
result := make([]string, 0, 10)
final_result := [][]string{}
for z := range read_file {
fmt.Println("Reading file n. ", z)
temp_string_filename = read_file[temp_string_filename_counter]
f, _ := os.Open(temp_string_filename)
defer f.Close()
scanner := bufio.NewScanner(f)
scanner.Split(bufio.ScanWords)
for scanner.Scan() {
line := scanner.Text()
for _, v := range list {
if v == line {
encountered_modules = append(encountered_modules, line)
}
}
}
for v := range encountered_modules {
if encountered[encountered_modules[v]] == true {
// Do not add duplicate.
} else {
// Record this element as an encountered element.
encountered[encountered_modules[v]] = true
result = append(result, encountered_modules[v])
}
}
temp_string_filename_counter++
final_result = append(final_result, result)
}
return result
}
func list_file() []string {
files_names := make([]string, 0, 10)
files, err := ioutil.ReadDir("./")
if err != nil {
log.Fatal(err)
}
for _, f := range files {
if strings.HasSuffix(f.Name(), ".txt") {
files_names = append(files_names, string(f.Name()))
}
}
return files_names
}

It's hard to be sure, since your code is difficult to read, but this looks particularly suspicious (in pseudocode),
// main
for each file in list_file() {
result = {
// isValueInList
var result
for each file in list_file() {
for each word in file {
if word in wordlist and not in result {
result = append(result, word)
}
}
}
// all the words in wordlist in any of the files
return result
}
// main
write result
}
There are other problems with your code.
Here's a more readable example (a first draft) of what you appear to be trying to do (Python modules in Python files?):
package main
import (
"bufio"
"bytes"
"fmt"
"io/ioutil"
"os"
"path/filepath"
)
var modules = map[string]bool{
"numpy": true, "grabscreen": true, "cv2": true, "time": true, "os": true, "pandas": true, "tqdm": true, "collections": true,
"models": true, "random": true, "inception_v3": true, "googlenet": true, "shuffle": true, "getkeys": true, "tflearn": true,
"directkeys": true, "statistics": true, "motion": true, "tflearn.layers.conv": true, "conv_2d": true,
"max_pool_2d": true, "avg_pool_2d": true, "conv_3d": true, "max_pool_3d": true, "avg_pool_3d": true,
}
func findWords(filename string, lexicon map[string]bool) error {
f, err := os.Open(filename)
if err != nil {
return err
}
defer f.Close()
words := make(map[string]bool)
s := bufio.NewScanner(f)
s.Split(bufio.ScanWords)
for s.Scan() {
word := s.Text()
if _, exists := lexicon[word]; exists {
words[word] = true
}
}
if err := s.Err(); err != nil {
return err
}
var buf bytes.Buffer
for word := range words {
buf.WriteString(word)
buf.WriteString("\n")
}
if buf.Len() > 0 {
err := ioutil.WriteFile(filename+`.words`, buf.Bytes(), 0666)
if err != nil {
return err
}
}
return nil
}
func main() {
dir := `./`
files, err := ioutil.ReadDir(dir)
if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
for _, file := range files {
filename := file.Name()
if filepath.Ext(filename) != ".py" {
continue
}
if err := findWords(filename, modules); err != nil {
fmt.Fprintln(os.Stderr, err)
}
}
}

There are a few mistakes in your code, so I've rewritten most of it.
What I did:
1) open a file
2) read a line
3) compare it
4) check if the target file exists
5) if not, create it
6) if it does, append to it
7) write to it
8) close target file
9) goto 2 if there are more lines
10) goto 1 if there are more files
I tried to make it as readable as possible so that everybody can understand it.
package main
import (
"bufio"
"fmt"
"io/ioutil"
"log"
"os"
"path/filepath"
"strconv"
"strings"
"time"
)
const readDir string = "./"
var startTime time.Time
func main() {
for noFile, fileName := range listFile() {
startTime = time.Now()
fileInput, err := os.Open(fileName)
if err != nil {
log.Fatal(err)
}
defer fileInput.Close()
scanner := bufio.NewScanner(fileInput)
for scanner.Scan() {
for _, targetContent := range []string{"numpy", "grabscreen", "cv2", "time", "os", "pandas", "tqdm", "collections", "models", "random", "inception_v3", "googlenet", "shuffle", "getkeys", "tflearn", "directkeys", "statistics", "motion", "tflearn.layers.conv", "conv_2d", "max_pool_2d", "avg_pool_2d", "conv_3d", "max_pool_3d", "avg_pool_3d"} {
if strings.Contains(scanner.Text(), targetContent) {
if _, err := os.Stat(readDir + "output_" + strconv.Itoa(noFile)); os.IsNotExist(err) {
fmt.Println("File : " + readDir + "output_" + strconv.Itoa(noFile) + " does not exists, creating it now!")
createFile, err := os.Create(readDir + "output_" + strconv.Itoa(noFile))
if err != nil {
panic(err)
}
createFile.Close()
}
fileOutput, err := os.OpenFile(readDir+"output_"+strconv.Itoa(noFile), os.O_APPEND|os.O_WRONLY, 0600)
if err != nil {
panic(err)
}
if _, err = fileOutput.WriteString("contains : " + targetContent + " in : " + scanner.Text() + "\n"); err != nil {
panic(err)
}
fileOutput.Close()
fmt.Println("Writing file : ", readDir+"output_"+strconv.Itoa(noFile))
fmt.Println("contains : " + targetContent + " in : " + scanner.Text())
}
}
}
if err := scanner.Err(); err != nil {
log.Fatal(err)
}
log.Printf("Execution took %s", time.Since(startTime))
}
}
func listFile() []string {
filesNames := make([]string, 0, 100)
files, err := ioutil.ReadDir(readDir)
if err != nil {
log.Fatal(err)
}
for _, f := range files {
if strings.HasSuffix(f.Name(), ".txt") {
fileName, err := filepath.Abs(string(f.Name()))
if err != nil {
log.Fatal(err)
}
filesNames = append(filesNames, fileName)
}
}
return filesNames
}

Related

How to split a string between two characters

I want to split a string up between two characters ({{ and }}).
I have a string like {{number1}} + {{number2}} > {{number3}}
and I'm looking for something that returns:
[number1, number2, number3]
You can try it with Regex:
s := "{{number1}} + {{number2}} > {{number3}}"
// Find all substrings in form {<var name>}
re := regexp.MustCompile("{[a-z]*[0-9]*[a-z]*}")
nums := re.FindAllString(s, -1)
// Remove '{' and '}' from all substrings
for i := range nums {
nums[i] = strings.TrimPrefix(nums[i], "{")
nums[i] = strings.TrimSuffix(nums[i], "}")
}
fmt.Println(nums) // output: [number1 number2 number3]
You can experiment with regex here: https://regex101.com/r/kkPWAS/1
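As a variation on the same idea (my own sketch, not part of the answer above), a capture group lets you skip the trimming step:
package main

import (
    "fmt"
    "regexp"
)

func main() {
    s := "{{number1}} + {{number2}} > {{number3}}"
    // Capture whatever sits between {{ and }}.
    re := regexp.MustCompile(`\{\{\s*([^{}]+?)\s*\}\}`)
    var nums []string
    for _, m := range re.FindAllStringSubmatch(s, -1) {
        nums = append(nums, m[1]) // m[0] is the full match, m[1] the captured name
    }
    fmt.Println(nums) // output: [number1 number2 number3]
}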
Use the regex [A-Za-z]+[0-9] to extract the alphanumeric parts of the string as a string slice.
package main
import (
"fmt"
"regexp"
)
func main() {
s := `{{number1}} + {{number2}} > {{number3}}`
re := regexp.MustCompile("[A-Za-z]+[0-9]")
p := re.FindAllString(s, -1)
fmt.Println(p) //[number1 number2 number3]
}
The hard way: using the template parser ^^
package main
import (
"fmt"
"strings"
"text/template/parse"
)
func main() {
input := "{{number1}} + {{number2}} > {{number3}}"
out := parseit(input)
fmt.Printf("%#v\n", out)
}
func parseit(input string) (out []string) {
input = strings.Replace(input, "{{", "{{.", -1) // Force func calls to become variables.
tree, err := parse.Parse("", input, "{{", "}}")
if err != nil {
panic(err)
}
visit(tree[""].Root, func(n parse.Node) bool {
x, ok := n.(*parse.FieldNode)
if ok {
out = append(out, strings.Join(x.Ident, "."))
}
return true
})
return
}
func visit(n parse.Node, fn func(parse.Node) bool) bool {
if n == nil {
return true
}
if !fn(n) {
return false
}
if l, ok := n.(*parse.ListNode); ok {
for _, nn := range l.Nodes {
if !visit(nn, fn) {
continue
}
}
}
if l, ok := n.(*parse.RangeNode); ok {
if !visit(l.BranchNode.Pipe, fn) {
return false
}
if l.BranchNode.List != nil {
if !visit(l.BranchNode.List, fn) {
return false
}
}
if l.BranchNode.ElseList != nil {
if !visit(l.BranchNode.ElseList, fn) {
return false
}
}
}
if l, ok := n.(*parse.ActionNode); ok {
for _, c := range l.Pipe.Decl {
if !visit(c, fn) {
continue
}
}
for _, c := range l.Pipe.Cmds {
if !visit(c, fn) {
continue
}
}
}
if l, ok := n.(*parse.CommandNode); ok {
for _, a := range l.Args {
if !visit(a, fn) {
continue
}
}
}
if l, ok := n.(*parse.PipeNode); ok {
for _, a := range l.Decl {
if !visit(a, fn) {
continue
}
}
for _, a := range l.Cmds {
if !visit(a, fn) {
continue
}
}
}
return true
}
If it happens that you really were manipulating a template string, but parsing fails because of function calls, and you do not want to apply the rewrite input = strings.Replace(input, "{{", "{{.", -1) (which forces func calls to become variables), you can always force-load a template using functions similar to:
var reMissingIdent = regexp.MustCompile(`template: :[0-9]+: function "([^"]+)" not defined`)
func ParseTextTemplateAnyway(s string) (*texttemplate.Template, texttemplate.FuncMap, error) {
fn := texttemplate.FuncMap{}
for {
t, err := texttemplate.New("").Funcs(fn).Parse(s)
if err == nil {
return t, fn, err
}
s := err.Error()
res := reMissingIdent.FindAllStringSubmatch(s, -1)
if len(res) > 0 {
fn[res[0][1]] = func(s ...interface{}) string { return "" }
} else {
return t, fn, err
}
}
// return nil, nil
}
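A minimal usage sketch (mine, not the answerer's); it assumes ParseTextTemplateAnyway and reMissingIdent above live in the same package, with text/template imported as texttemplate:
package main

import "fmt"

func main() {
    input := "{{number1}} + {{number2}} > {{number3}}"
    // number1, number2 and number3 are not defined functions, so three
    // no-op stubs get registered before the parse finally succeeds.
    t, fn, err := ParseTextTemplateAnyway(input)
    if err != nil {
        panic(err)
    }
    fmt.Println(t.Name(), len(fn)) // prints the empty template name and 3
}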
You don't need to use libraries. You can create your own function.
package main
const r1 = '{'
const r2 = '}'
func GetStrings(in string) (out []string) {
var tren string
wr := false
f := true
for _, c := range in {
if wr && c != r2 {
tren = tren + string(c)
}
if c == r1 {
f = !f
wr = f
}
if c == r2 {
wr = false
if f {
out = append(out, tren)
tren = ""
}
f = !f
}
}
return
}
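For example, adding a main to the same file (the import and main below are mine, not part of the answer) prints the extracted names:
import "fmt"

func main() {
    s := "{{number1}} + {{number2}} > {{number3}}"
    fmt.Println(GetStrings(s)) // output: [number1 number2 number3]
}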

unexpected EOF with fmt.Scanner

If I want to scan through a string, I can do this:
package main
import (
"fmt"
"strings"
)
func main() {
r := strings.NewReader("west north east")
for {
var s string
_, e := fmt.Fscan(r, &s)
fmt.Printf("%q %v\n", s, e)
if e != nil { break }
}
}
Result:
"west" <nil>
"north" <nil>
"east" <nil>
"" EOF
I recently discovered fmt.Scanner [1], so I thought I would try to implement
it. I came up with this:
package main
import (
"fmt"
"strings"
)
type comma struct { tok string }
func (c *comma) Scan(state fmt.ScanState, verb rune) error {
tok, err := state.Token(false, func(r rune) bool {
return r != ','
})
if err != nil {
return err
}
if _, _, err := state.ReadRune(); err != nil {
if len(tok) == 0 {
return err
}
}
c.tok = string(tok)
return nil
}
func main() {
r := strings.NewReader("west,north,east")
for {
var c comma
_, e := fmt.Fscan(r, &c)
fmt.Printf("%q %v\n", c.tok, e)
if e != nil { break }
}
}
Result:
"west" <nil>
"north" <nil>
"east" <nil>
"" unexpected EOF
So the result is pretty close, but what bothers me is the unexpected EOF. Is
it possible to just get a regular EOF with a custom fmt.Scanner? Am I doing
something wrong here, or is this a bug?
https://golang.org/pkg/fmt#Scanner
Thanks to Ian Lance Taylor on the golang-nuts list, who suggested panicking with the error instead of returning it. In the Go code, Fscan calls a function doScan, which in turn calls a function errorHandler [1]. This last function uses recover to turn any panic into a regular error. This program gives identical output to my original example:
package main
import (
"fmt"
"strings"
)
type comma struct { tok string }
func (c *comma) Scan(state fmt.ScanState, verb rune) error {
tok, err := state.Token(false, func(r rune) bool {
return r != ','
})
if err != nil { return err }
if _, _, err := state.ReadRune(); err != nil {
if len(tok) == 0 {
panic(err)
}
}
c.tok = string(tok)
return nil
}
func main() {
r := strings.NewReader("west,north,east")
for {
var c comma
_, err := fmt.Fscan(r, &c)
fmt.Printf("%q %v\n", c.tok, err)
if err != nil { break }
}
}
https://github.com/golang/go/blob/go1.16.4/src/fmt/scan.go#L1056-L1067
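For reference, the recover-to-error idiom that errorHandler relies on looks roughly like this; it is a generic illustration of the pattern, not the actual scan.go code:
// Generic recover-to-error pattern: a deferred call converts a panicked
// error back into an ordinary return value.
// Used as: func doSomething() (err error) { defer handleError(&err); ... }
func handleError(errp *error) {
    if e := recover(); e != nil {
        if err, ok := e.(error); ok {
            *errp = err // the panicked error becomes the normal return error
        } else {
            panic(e) // anything that isn't an error keeps propagating
        }
    }
}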

Expose kubernetes logs to browser through websocket

I am trying to use sidecar mode in kubernetes to create a logs sidecar to expose specific container logs. And I am using kubernetes client to fetch logs from kubernetes api and send it out by websocket. The code shows below:
func serveWs(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Access-Control-Allow-Origin", "*")
conn, err := upgrader.Upgrade(w, r, nil)
if err != nil {
if _, ok := err.(websocket.HandshakeError); !ok {
log.Println(err)
}
return
}
defer conn.Close()
logsClient, err := InitKubeLogsClient(config.InCluster)
if err != nil {
log.Fatalln(err)
}
stream, err := logsClient.GetLogs(config.Namespace, config.PodName, config.ContainerName)
if err != nil {
log.Fatalln(err)
}
defer stream.Close()
reader := bufio.NewReader(stream)
for {
line, err := reader.ReadString('\n')
if err != nil {
log.Fatalln(err)
}
conn.WriteMessage(websocket.TextMessage, []byte(line))
}
}
I am using https://github.com/gorilla/websocket as the websocket lib. And on the browser
Is this the best way to do what I want? Is there a better way to expose the logs API from Kubernetes over a websocket?
Putting my final code here, thanks for the tips from @Peter:
func serveWs(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Access-Control-Allow-Origin", "*")
conn, err := upgrader.Upgrade(w, r, nil)
if err != nil {
if _, ok := err.(websocket.HandshakeError); !ok {
log.Println(err)
}
return
}
log.Println("create new connection")
defer func() {
conn.Close()
log.Println("connection close")
}()
logsClient, err := InitKubeLogsClient(config.InCluster)
if err != nil {
log.Println(err)
return
}
stream, err := logsClient.GetLogs(config.Namespace, config.PodName, config.ContainerName)
if err != nil {
log.Println(err)
return
}
defer stream.Close()
reader := bufio.NewReaderSize(stream, 16)
lastLine := ""
for {
data, isPrefix, err := reader.ReadLine()
if err != nil {
log.Println(err)
return
}
lines := strings.Split(string(data), "\r")
length := len(lines)
if len(lastLine) > 0 {
lines[0] = lastLine + lines[0]
lastLine = ""
}
if isPrefix {
lastLine = lines[length-1]
lines = lines[:(length - 1)]
}
for _, line := range lines {
if err := conn.WriteMessage(websocket.TextMessage, []byte(line)); err != nil {
log.Println(err)
return
}
}
}
}
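For reference, the GetLogs helper wrapped above can be built directly on client-go. The poster's InitKubeLogsClient and GetLogs are not shown, so this is only a sketch of what such a helper might look like:
import (
    "context"
    "io"

    corev1 "k8s.io/api/core/v1"
    "k8s.io/client-go/kubernetes"
    "k8s.io/client-go/rest"
)

// getLogStream follows the logs of one container from inside the cluster.
func getLogStream(namespace, pod, container string) (io.ReadCloser, error) {
    cfg, err := rest.InClusterConfig()
    if err != nil {
        return nil, err
    }
    clientset, err := kubernetes.NewForConfig(cfg)
    if err != nil {
        return nil, err
    }
    req := clientset.CoreV1().Pods(namespace).GetLogs(pod, &corev1.PodLogOptions{
        Container: container,
        Follow:    true, // keep streaming as new log lines arrive
    })
    // Stream takes a context in recent client-go versions.
    return req.Stream(context.Background())
}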

Reading from reader until a string is reached

I am trying to write a function to keep reading from a buffered reader until I hit a certain string, then to stop reading and return everything read prior to that string.
In other words, I want to do the same thing as reader.ReadString() does, except taking a string instead of a single byte.
For instance:
mydata, err := reader.ReadString("\r\n.\r\n") //obviously will not compile
How can I do this?
Thanks in advance,
Twichy
Amendment 1: Previous attempt
Here is my previous attempt; it's badly written and doesn't work, but hopefully it demonstrates what I am trying to do.
func readDotData(reader *bufio.Reader)(string, error){
delims := []byte{ '\r', '\n', '.', '\r', '\n'}
curpos := 0
var buffer []byte
for {
curpos = 0
data, err := reader.ReadSlice(delims[0])
if err!=nil{ return "", err }
buffer = append(buffer, data...)
for {
curpos++
b, err := reader.ReadByte()
if err!=nil{ return "", err }
if b!=delims[curpos]{
for curpos >= 0{
buffer = append(buffer, delims[curpos])
curpos--
}
break
}
if curpos == len(delims){
return string(buffer[len(buffer)-1:]), nil
}
}
}
panic("unreachable")
}
package main
import (
"bytes"
"fmt"
"log"
)
type reader interface {
ReadString(delim byte) (line string, err error)
}
func read(r reader, delim []byte) (line []byte, err error) {
for {
s := ""
s, err = r.ReadString(delim[len(delim)-1])
if err != nil {
return
}
line = append(line, []byte(s)...)
if bytes.HasSuffix(line, delim) {
return line[:len(line)-len(delim)], nil
}
}
}
func main() {
src := bytes.NewBufferString("123deli456elim789delimABCdelimDEF")
for {
b, err := read(src, []byte("delim"))
if err != nil {
log.Fatal(err)
}
fmt.Printf("%q\n", b)
}
}
Playground
Output:
"123deli456elim789"
"ABC"
2009/11/10 23:00:00 EOF
http://play.golang.org/p/BpA5pOc-Rn
package main
import (
"bytes"
"fmt"
)
func main() {
b := bytes.NewBuffer([]byte("Hello, playground!\r\n.\r\nIrrelevant trailer."))
c := make([]byte, 0, b.Len())
for {
p := b.Bytes()
if bytes.Equal(p[:5], []byte("\r\n.\r\n")) {
fmt.Println(string(c))
return
}
c = append(c, b.Next(1)...)
}
}
For example,
package main
import (
"bufio"
"bytes"
"fmt"
"strings"
)
var delim = []byte{'\r', '\n', '.', '\r', '\n'}
func ScanLines(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
for i := 0; i+len(delim) <= len(data); {
j := i + bytes.IndexByte(data[i:], delim[0])
if j < i {
break
}
if bytes.Equal(data[j+1:j+len(delim)], delim[1:]) {
// We have a full delim-terminated line.
return j + len(delim), data[0:j], nil
}
i = j + 1
}
// If we're at EOF, we have a final, non-terminated line. Return it.
if atEOF {
return len(data), data, nil
}
// Request more data.
return 0, nil, nil
}
func main() {
delims := string(delim)
input := "1234" + delims + "5678" + delims + "1234567901234567890" + delims
scanner := bufio.NewScanner(strings.NewReader(input))
scanner.Split(ScanLines)
for scanner.Scan() {
fmt.Printf("%s\n", scanner.Text())
}
if err := scanner.Err(); err != nil {
fmt.Printf("Invalid input: %s", err)
}
}
Output:
1234
5678
1234567901234567890
Because the same byte appears more than once in the delimiter, you can do it as below:
func readWithEnd(reader *bufio.Reader) ([]byte, error) {
message, err := reader.ReadBytes('#')
if err != nil {
return nil, err
}
a1, err := reader.ReadByte()
if err != nil {
return nil, err
}
message = append(message, a1)
if a1 != '\t' {
message2, err := readWithEnd(reader)
if err != nil {
return nil, err
}
ret := append(message, message2...)
return ret, nil
}
a2, err := reader.ReadByte()
if err != nil {
return nil, err
}
message = append(message, a2)
if a2 != '#' {
message2, err := readWithEnd(reader)
if err != nil {
return nil, err
}
ret := append(message, message2...)
return ret, nil
}
return message, nil
}
This is a sample that recognizes the "#\t#" delimiter on a TCP connection.
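A small usage sketch (mine, not the answerer's): the same function works with any bufio.Reader, for example one wrapping a string instead of a TCP connection. It assumes readWithEnd above is in the same package:
package main

import (
    "bufio"
    "fmt"
    "strings"
)

func main() {
    r := bufio.NewReader(strings.NewReader("first message#\t#second message#\t#"))
    for {
        msg, err := readWithEnd(r)
        if err != nil {
            break // io.EOF once the input is exhausted
        }
        fmt.Printf("%q\n", msg) // note: the "#\t#" delimiter is still attached
    }
}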

Using a Goroutine actually takes longer to execute

I'm sure that I'm doing something wrong. I have a Go program that parses 3D models in OBJ format and outputs a JSON object. When I run it without goroutines I get the following output:
$ go run objParser.go ak47.obj extincteur_obj.obj
--Creating ak47.json3d from ak47.obj
--Exported 85772 faces with 89088 verticies
--Creating extincteur_obj.json3d from extincteur_obj.obj
--Exported 150316 faces with 151425 verticies
Parsed 2 files in 8.4963s
Then I added in the goroutines and I get this output:
$ go run objParser.go ak47.obj extincteur_obj.obj
--Creating ak47.json3d from ak47.obj
--Creating extincteur_obj.json3d from extincteur_obj.obj
--Exported 85772 faces with 89088 verticies
--Exported 150316 faces with 151425 verticies
Parsed 2 files in 10.23137s
The order in which things are printed is what I expected given the interleaving of the parsing, but I have no idea why it actually takes longer! The code is pretty long; I snipped what I could but it's still pretty long, sorry about that!
package main
func parseFile(name string, finished chan int) {
var Verts []*Vertex
var Texs []*TexCoord
var Faces []*Face
var objFile, mtlFile, jsonFile *os.File
var parseMaterial bool
// Set up files and i/o
inName := name
outName := strings.Replace(inName, ".obj", ".json3d", -1)
parseMaterial = false
fmt.Printf("--"+FgGreen+"Creating"+Reset+" %s from %s\n", outName, inName)
var err error
var part []byte
var prefix bool
if objFile, err = os.Open(inName); err != nil {
fmt.Println(FgRed+"!!Failed to open input file!!"+Reset)
return
}
if jsonFile, err = os.Create(outName); err != nil {
fmt.Println(FgRed+"!!Failed to create output file!!"+Reset)
return
}
reader := bufio.NewReader(objFile)
writer := bufio.NewWriter(jsonFile)
buffer := bytes.NewBuffer(make([]byte, 1024))
// Read the file in and parse out what we need
for {
if part, prefix, err = reader.ReadLine(); err != nil {
break
}
buffer.Write(part)
if !prefix {
line := buffer.String()
if(strings.Contains(line, "v ")) {
Verts = append(Verts, parseVertex(line))
} else if(strings.Contains(line, "vt ")) {
Texs = append(Texs, parseTexCoord(line))
} else if(strings.Contains(line, "f ")) {
Faces = append(Faces, parseFace(line, Verts, Texs))
} else if(strings.Contains(line, "mtllib ")) {
mtlName := strings.Split(line, " ")[1]
if mtlFile, err = os.Open(mtlName); err != nil {
fmt.Printf("--"+FgRed+"Failed to find material file: %s\n"+Reset, mtlName)
parseMaterial = false
} else {
parseMaterial = true
}
}
buffer.Reset()
}
}
if err == io.EOF {
err = nil
}
objFile.Close()
// Write out the data
writer.WriteString("{\"obj\":[\n");
// Write out the verts
writer.WriteString("{\"vrt\":[\n");
for i, vert := range Verts {
writer.WriteString(vert.String())
if i < len(Verts) - 1 { writer.WriteString(",") }
writer.WriteString("\n")
}
// Write out the faces
writer.WriteString("],\"fac\":[\n")
for i, face := range Faces {
writer.WriteString(face.String(true))
if i < len(Faces) - 1 { writer.WriteString(",") }
writer.WriteString("\n")
}
// Write out the normals
writer.WriteString("],\"nrm\":[")
for i, face := range Faces {
writer.WriteString("[")
for j, vert := range face.verts {
length := math.Sqrt((vert.X * vert.X) + (vert.Y * vert.Y) + (vert.Z * vert.Z))
x := vert.X / length
y := vert.Y / length
z := vert.Z / length
normal := fmt.Sprintf("[%f,%f,%f]", x, y, z)
writer.WriteString(normal)
if(j < len(face.verts)-1) { writer.WriteString(",") }
}
writer.WriteString("]")
//writer.WriteString("[0, 1, 0]")
if i < len(Faces) - 1 { writer.WriteString(",") }
writer.WriteString("\n")
}
// Write out the tex coords
writer.WriteString("],\"tex\":[")
for i, face := range Faces {
writer.WriteString("[")
writer.WriteString(face.tex[0].String())
writer.WriteString(",")
writer.WriteString(face.tex[1].String())
writer.WriteString(",")
writer.WriteString(face.tex[2].String())
writer.WriteString("]")
if i < len(Faces) - 1 { writer.WriteString(",") }
writer.WriteString("\n")
}
// Close obj block
writer.WriteString("]}]");
if parseMaterial {
writer.WriteString(",mat:[{");
reader := bufio.NewReader(mtlFile)
// Read the file in and parse out what we need
for {
if part, prefix, err = reader.ReadLine(); err != nil {
break
}
buffer.Write(part)
if !prefix {
line := buffer.String()
if(strings.Contains(line, "map_Kd ")) {
parts := strings.Split(line, " ")
entry := fmt.Sprintf("\"t\":\"%s\",", parts[1])
writer.WriteString(entry)
width, height := 256, 256
var imageFile *os.File
if imageFile, err = os.Open(parts[1]); err != nil {
fmt.Printf("--"+FgRed+"Failed to find %s, defaulting to 256x256"+Reset+"\n", parts[1])
return
} else {
var config image.Config
imageReader := bufio.NewReader(imageFile)
config, err = jpeg.DecodeConfig(imageReader)
width, height = config.Width, config.Height
fmt.Printf("--"+FgGreen+"Verifing"+Reset+" that %s is %dpx x %dpx\n", parts[1], width, height)
}
size := fmt.Sprintf("\"w\":%d,\"h\":%d,", width, height)
writer.WriteString(size)
} else if(strings.Contains(line, "Kd ")) {
parts := strings.Split(line, " ")
entry := fmt.Sprintf("\"r\":%s, \"g\":%s, \"b\":%s,", parts[1], parts[2], parts[3])
writer.WriteString(entry)
}
buffer.Reset()
}
}
if err == io.EOF {
err = nil
}
writer.WriteString("\"res\":100,\"uv\":true}]");
}
// Close json
writer.WriteString("}");
writer.Flush()
jsonFile.Close()
fmt.Printf("--"+FgGreen+"Exported"+Reset+" %d faces with %d verticies\n", len(Faces), len(Verts))
finished <- -1
}
func main(){
// Verify we were called correctly
if len(os.Args) < 2 {
fmt.Println("Usage: go run objParser.go <OBJ File>");
return
}
files := len(os.Args)
finished := make(chan int)
now := time.Now()
for i := 1; i < files; i++ {
go parseFile(os.Args[i], finished)
}
for i := 1; i < files; i++ {
<- finished
}
fmt.Printf("Parsed %d files in %s\n", files-1, time.Since(now))
}
You should set the GOMAXPROCS environment variable to the maximum number of usable processors, or call the runtime.GOMAXPROCS function at run time.
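For example (a minimal sketch of both options): either run the program with the variable set in the environment, e.g. GOMAXPROCS=4 go run objParser.go ak47.obj extincteur_obj.obj, or set it from code at startup:
package main

import (
    "fmt"
    "runtime"
)

func main() {
    // Use all logical CPUs reported by the OS.
    runtime.GOMAXPROCS(runtime.NumCPU())
    fmt.Println("GOMAXPROCS is now", runtime.GOMAXPROCS(0)) // passing 0 only queries the value
}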
