// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gccgoimporter

import (
	"bytes"
	"debug/elf"
	"errors"
	"fmt"
	"internal/xcoff"
	"io"
	"strconv"
	"strings"
)

// Magic strings for different archive file formats.
const (
	armag  = "!<arch>\n" // standard Unix archive
	armagt = "!<thin>\n" // thin archive (member contents stored out of line)
	armagb = "<bigaf>\n" // AIX big archive
)

// Offsets and sizes for fields in a standard archive header.
// Each field is a fixed-width ASCII region; together they form a
// 60-byte member header (arHdrSize).
const (
	arNameOff  = 0
	arNameSize = 16
	arDateOff  = arNameOff + arNameSize
	arDateSize = 12
	arUIDOff   = arDateOff + arDateSize
	arUIDSize  = 6
	arGIDOff   = arUIDOff + arUIDSize
	arGIDSize  = 6
	arModeOff  = arGIDOff + arGIDSize
	arModeSize = 8
	arSizeOff  = arModeOff + arModeSize
	arSizeSize = 10
	arFmagOff  = arSizeOff + arSizeSize
	arFmagSize = 2

	// arHdrSize is the total size of a standard archive member header.
	arHdrSize = arFmagOff + arFmagSize
)

// The contents of the fmag field of a standard archive header,
// used as a sanity check that each member header is well-formed.
const arfmag = "`\n"

// arExportData takes an archive file and returns a ReadSeeker for the
// export data in that file. This assumes that there is only one
// object in the archive containing export data, which is not quite
// what gccgo does; gccgo concatenates together all the export data
// for all the objects in the file.  In practice that case does not arise.
func arExportData( io.ReadSeeker) (io.ReadSeeker, error) {
	if ,  := .Seek(0, io.SeekStart);  != nil {
		return nil, 
	}

	var  [len(armag)]byte
	if ,  := .Read([:]);  != nil {
		return nil, 
	}

	switch string([:]) {
	case armag:
		return standardArExportData()
	case armagt:
		return nil, errors.New("unsupported thin archive")
	case armagb:
		return aixBigArExportData()
	default:
		return nil, fmt.Errorf("unrecognized archive file format %q", [:])
	}
}

// standardArExportData returns export data from a standard archive.
func standardArExportData( io.ReadSeeker) (io.ReadSeeker, error) {
	 := int64(len(armag))
	for {
		var  [arHdrSize]byte
		if ,  := .Read([:]);  != nil {
			return nil, 
		}
		 += arHdrSize

		if bytes.Compare([arFmagOff:arFmagOff+arFmagSize], []byte(arfmag)) != 0 {
			return nil, fmt.Errorf("archive header format header (%q)", [:])
		}

		,  := strconv.ParseInt(strings.TrimSpace(string([arSizeOff:arSizeOff+arSizeSize])), 10, 64)
		if  != nil {
			return nil, fmt.Errorf("error parsing size in archive header (%q): %v", [:], )
		}

		 := [arNameOff : arNameOff+arNameSize]
		if [0] == '/' && ([1] == ' ' || [1] == '/' || bytes.Compare([:8], []byte("/SYM64/ ")) == 0) {
			// Archive symbol table or extended name table,
			// which we don't care about.
		} else {
			 := readerAtFromSeeker()
			,  := elfFromAr(io.NewSectionReader(, , ))
			if  != nil ||  != nil {
				return , 
			}
		}

		if &1 != 0 {
			++
		}
		 += 
		if ,  := .Seek(, io.SeekStart);  != nil {
			return nil, 
		}
	}
}

// elfFromAr tries to get export data from an archive member as an ELF file.
// If there is no export data, this returns nil, nil.
func elfFromAr( *io.SectionReader) (io.ReadSeeker, error) {
	,  := elf.NewFile()
	if  != nil {
		return nil, 
	}
	 := .Section(".go_export")
	if  == nil {
		return nil, nil
	}
	return .Open(), nil
}

// aixBigArExportData returns export data from an AIX big archive.
func aixBigArExportData( io.ReadSeeker) (io.ReadSeeker, error) {
	 := readerAtFromSeeker()
	,  := xcoff.NewArchive()
	if  != nil {
		return nil, 
	}

	for ,  := range .Members {
		,  := .GetFile(.Name)
		if  != nil {
			return nil, 
		}
		 := .CSect(".go_export")
		if  != nil {
			return bytes.NewReader(), nil
		}
	}

	return nil, fmt.Errorf(".go_export not found in this archive")
}

// readerAtFromSeeker turns an io.ReadSeeker into an io.ReaderAt.
// This is only safe because there won't be any concurrent seeks
// while this code is executing.
func readerAtFromSeeker( io.ReadSeeker) io.ReaderAt {
	if ,  := .(io.ReaderAt);  {
		return 
	}
	return seekerReadAt{}
}

type seekerReadAt struct {
	seeker io.ReadSeeker
}

func ( seekerReadAt) ( []byte,  int64) (int, error) {
	if ,  := .seeker.Seek(, io.SeekStart);  != nil {
		return 0, 
	}
	return .seeker.Read()
}