'From Squeak5.3alpha of 18 January 2019 [latest update: #18380] on 30 January 2019 at 5:55:20 pm'!

"Change Set:		RemoveHTTPClient
Date:			30 January 2019
Author:			tim@rowledge.org

A pass at removing HTTPClient and redirecting any users to other classes; mostly HTTPSocket for now. We should consider further diversion to WebClient in the future and get rid of HTTPSocket - or at least make it a better facade for WebClient. These changes need some attention from anyone conversant with the remote directory/server stuff in case I broke something."!

!AutoStart class methodsFor: 'updating' stamp: 'tpr 1/30/2019 17:16'!
processUpdates
	"Process update files from a well-known update server. This method is called at system startup time; the actual update processing is undertaken automatically only if the preference #updateFromServerAtStartup is true."
	| choice |
	(MCMcmUpdater updateFromServerAtStartup) ifTrue:
		[choice := UIManager default
			chooseFrom: #('Yes, Update' 'No, Not now' 'Don''t ask again')
			title: 'Shall I look for new code\updates on the server?' withCRs.
		choice = 1 ifTrue: [MCMcmUpdater updateFromServer].
		choice = 3 ifTrue:
			[MCMcmUpdater updateFromServerAtStartup: false.
			self inform: 'Remember to save your image to make this setting permanent']]! !

!AutoStart class methodsFor: 'initialization' stamp: 'tpr 1/30/2019 17:16'!
startUp: resuming
	"The image is either being newly started (resuming is true), or it's just been snapshotted. If this has just been a snapshot, skip all the startup stuff."
	| startupParameters launchers |
	self active ifTrue: [^self].
	self active: true.
	resuming ifFalse: [^self].
	startupParameters := Smalltalk namedArguments.
	Project current world ifNotNil: [:w | w install. w firstHand position: 100 @ 100].
	self processUpdates.
	launchers := self installedLaunchers collect: [:launcher | launcher new].
	launchers do: [:launcher | launcher parameters: startupParameters].
	launchers do: [:launcher | Project current addDeferredUIMessage: [launcher startUp]]! !

!HTTPLoader class methodsFor: 'accessing' stamp: 'tpr 1/30/2019 17:19'!
httpRequestClass
	^HTTPDownloadRequest! !

!HTTPServerDirectory methodsFor: 'file directory' stamp: 'tpr 1/30/2019 17:26'!
entries
	| answer ftpEntries |
	answer := HTTPSocket httpGetDocument: self dirListUrl.
	answer isString
		ifTrue: [^self error: 'Listing failed: ' , answer]
		ifFalse: [answer := answer content].
	answer first == $< ifTrue: [self error: 'Listing failed: ' , answer].
	ftpEntries := answer findTokens: String crlf.
	^ftpEntries
		collect: [:ftpEntry | self class parseFTPEntry: ftpEntry]
		thenSelect: [:entry | entry notNil]! !

!PRServerDirectory methodsFor: 'squeaklets' stamp: 'tpr 1/30/2019 17:34'!
writeProject: aProject inFileNamed: fileNameString fromDirectory: localDirectory
	"Write aProject (a file version can be found in the file named fileNameString in localDirectory)."
	| url arguments string |
	url := self urlFromServer: self server directories: {'programmatic'. 'uploadproject'}.
	arguments := self getPostArgsFromProject: aProject fileNamed: fileNameString fromDirectory: localDirectory.
	string := Cursor read showWhile: [(HTTPSocket httpPostDocument: url args: arguments) contents].
	(string beginsWith: '--OK--') ifTrue: [^true].
	self inform: ('Server responded: {1}' translated format: {string}).
	^false! !
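"Illustrative usage sketch: callers that previously went through HTTPClient can usually be redirected to the HTTPSocket entry points relied on in this change set. The example.org URLs and argsDictionary below are placeholders, not part of the system.

	(HTTPSocket httpGetDocument: 'http://www.example.org/index.html') contents.
	HTTPSocket httpPostDocument: 'http://www.example.org/upload' args: argsDictionary.
	(HTTPSocket httpGet: 'http://www.example.org/updates.list') contents.

As in the methods above, a String answer from these calls signals failure and should be checked before the result is used."!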
getLines "private - answer a collection of lines with the server response" | url lines string | url := self urlFromServer: self server directories: {'programmatic'} , self directories. url := url , self slash. string := Cursor read showWhile: [(HTTPSocket httpGetDocument: url) contents]. (string beginsWith: '--OK--') ifFalse: [^ nil]. lines := OrderedCollection new. (string allButFirst: 6) linesDo: [:line | lines add: line squeakToIso]. ^ lines! ! !Project methodsFor: 'SuperSwiki' stamp: 'tpr 1/30/2019 17:39'! tellAFriend: emailAddressOrNil " Project current tellAFrien " | urlForLoading | (urlForLoading := self urlForLoading) ifNil: [urlForLoading := self url "fallback for dtp servers"]. urlForLoading isEmptyOrNil ifTrue: [^ self inform: 'Since this project has not been saved yet, I cannot tell someone where it is.' translated]. FancyMailComposition new celeste: nil to: (emailAddressOrNil ifNil: ['RECIPIENT.GOESHERE']) subject: 'New/Updated Squeak project' initialText: 'This is a link to the Squeak project ' , self name , ': ' , String crlf theLinkToInclude: urlForLoading; open! ! !ProjectLauncher methodsFor: 'initialization' stamp: 'tpr 1/30/2019 17:39'! initialize super initialize. showSplash := true! ! !ProjectLauncher methodsFor: 'running' stamp: 'tpr 1/30/2019 17:41'! startUpAfterLogin | scriptName loader isUrl | self setupFlaps. Preferences readDocumentAtStartup ifTrue: [scriptName := Smalltalk documentPath ifNil: ['']. scriptName := scriptName convertFromSystemString. scriptName isEmpty ifFalse: ["figure out if script name is a URL by itself" isUrl := (scriptName asLowercase beginsWith: 'http://') or: [(scriptName asLowercase beginsWith: 'file://') or: [scriptName asLowercase beginsWith: 'ftp://']]. isUrl ifFalse: [| encodedPath pathTokens | "Allow for ../dir/scriptName arguments" pathTokens := scriptName splitBy: FileDirectory slash. pathTokens := pathTokens collect: [:s | s encodeForHTTP]. encodedPath := pathTokens reduce: [:acc :each | acc , FileDirectory slash , each]. scriptName := (FileDirectory default uri resolveRelativeURI: encodedPath) asString]]] ifFalse: [scriptName := '']. scriptName isEmptyOrNil ifTrue: [^ Preferences eToyFriendly ifTrue: [self currentWorld addGlobalFlaps]]. loader := CodeLoader new. loader loadSourceFiles: (Array with: scriptName). (scriptName asLowercase endsWith: '.pr') ifTrue: [self installProjectFrom: loader] ifFalse: [loader installSourceFiles]! ! !ResourceCollector methodsFor: 'resource writing' stamp: 'tpr 1/30/2019 17:43'! writeResourceForm: aForm fromLocator: aLocator "The given form has been externalized before. If it was reasonably compressed, use the bits of the original data - this allows us to recycle GIF, JPEG, PNG etc. data without using the internal compression (which is in most cases inferior). If necessary the data will be retrieved from its URL location. This retrieval is done only if the resouce comes from either * the local disk (in which case the file has never been published) * the browser cache (in which case we don't cache the resource locally) In any other case we will *not* attempt to retrieve it, because doing so can cause the system to connect to the network which is probably not what we want. It should be a rare case anyways; could only happen if one clears the squeak cache selectively." | fName fStream url data | "Try to be smart about the name of the file" fName := (aLocator urlString includes: $:) ifTrue: [ url := aLocator urlString asUrl. url path last] ifFalse: [aLocator urlString]. 
	fName isEmptyOrNil ifFalse: [fName := fName asFileName].
	(fName isEmptyOrNil or: [localDirectory isAFileNamed: fName]) ifTrue:
		["bad luck -- duplicate name"
		fName := localDirectory nextNameFor: 'resource' extension: (FileDirectory extensionFor: aLocator urlString)].
	"Let's see if we have cached it locally"
	ResourceManager lookupCachedResource: self baseUrl , aLocator urlString ifPresentDo: [:stream | data := stream upToEnd].
	"Check if the cache entry is without qualifying baseUrl. Workaround for older versions."
	data ifNil:
		[ResourceManager lookupCachedResource: aLocator urlString ifPresentDo: [:stream | data := stream upToEnd]].
	data ifNil:
		["We don't have it cached locally. Retrieve it from its original location."
		(url notNil and: [url hasRemoteContents]) ifTrue: [^nil]. "see note above"
		(Url schemeNameForString: aLocator urlString) ifNil: [^nil].
		data := HTTPLoader default retrieveContentsFor: aLocator urlString.
		data ifNil: [^nil].
		data := data content].
	"data size > aForm bits byteSize ifTrue:[^nil]."
	fStream := localDirectory newFileNamed: fName.
	fStream binary.
	fStream nextPutAll: data.
	fStream close.
	^{fName. data size}! !

!ResourceManager class methodsFor: 'resource caching' stamp: 'tpr 1/30/2019 17:43'!
cacheResource: urlString stream: aStream
	| fd localName file buf |
	(self resourceCache at: urlString ifAbsent: [#()]) size > 0 ifTrue: [^self]. "don't waste space"
	fd := Project squeakletDirectory.
	localName := fd nextNameFor: 'resource' extension: 'cache'.
	file := fd forceNewFileNamed: localName.
	buf := ByteArray new: 10000.
	aStream binary.
	file binary.
	[aStream atEnd] whileFalse:
		[buf := aStream next: buf size into: buf.
		file nextPutAll: buf].
	file close.
	"update cache"
	file := [fd oldFileNamed: self resourceCacheName]
		on: FileDoesNotExistException
		do: [:ex | fd forceNewFileNamed: self resourceCacheName].
	file setToEnd.
	file nextPutAll: urlString; cr.
	file nextPutAll: localName; cr.
	file close.
	self addCacheLocation: localName for: urlString.
	aStream position: 0.! !

!ServerDirectory class methodsFor: '*eToys-school support' stamp: 'tpr 1/30/2019 17:45'!
parseEToyUserListFrom: urlString
	| url userString userList |
	urlString ifNil: [^nil].
	url := urlString asUrl.
	userString := ["Note: We need to prevent going through the plugin API when retrieving a local (file) URL, since the plugin API (correctly) rejects file:// downloads."
		Cursor wait showWhile:
			[url hasRemoteContents
				ifTrue: ["Go through the browser (if present)"
					(HTTPSocket httpGet: url asString) contents]
				ifFalse: ["Go grab it directly"
					url retrieveContents contents]]]
		on: Error do: [nil].
	userString ifNil: [^nil].
	userList := userString lines collect: [:each | each withBlanksTrimmed].
	userList := userList reject: [:any | any isEmpty].
	(userList first = '##user list##') ifFalse: [^nil].
	userList := userList copyFrom: 2 to: userList size.
	^userList! !

!URI class methodsFor: 'class initialization' stamp: 'tpr 1/30/2019 17:48'!
initialize
	"URI initialize"
	ClientClasses := Dictionary new.
	ClientClasses
		"at: 'http' put: #HTTPClient; <- not sure what to replace this with, if anything. Is URI even wanted anymore?"
		at: 'ftp' put: #FTPClient;
		at: 'file' put: #FileDirectory! !
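"Illustrative sketch of the document format expected by ServerDirectory class>>parseEToyUserListFrom: above - a plain text listing whose first non-blank line is the marker '##user list##', followed by one user name per line; blank lines are trimmed and ignored. For example (the names are placeholders):

	##user list##
	alice
	bob

Anything that does not start with the marker makes the method answer nil."!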
!UpdateStreamDownloader class methodsFor: 'fetching updates' stamp: 'tpr 1/30/2019 17:48'!
newUpdatesOn: serverList special: indexPrefix throughNumber: aNumber
	"Return a list of fully formed URLs of update files we do not yet have. Go to the listed servers and look at the file 'updates.list' for the names of the last N update files. We look backwards for the first one we have, and make the list from there. tk 9/10/97
	No updates numbered higher than aNumber (if it is not nil) are returned."
	| existing out maxNumber |
	maxNumber := aNumber ifNil: [99999].
	out := OrderedCollection new.
	existing := SystemVersion current updates.
	serverList do: [:server | | raw doc list char |
		doc := HTTPSocket httpGet: 'http://' , server , indexPrefix , 'updates.list'.
		"test here for server being up"
		doc class == RWBinaryOrTextStream ifTrue:
			[raw := doc reset; contents. "one file name per line"
			list := self extractThisVersion: raw.
			list reverseDo: [:fileName | | ff itsNumber |
				ff := (fileName findTokens: '/') last. "allow subdirectories"
				itsNumber := ff initialIntegerOrNil.
				(existing includes: itsNumber)
					ifFalse: [(itsNumber == nil or: [itsNumber <= maxNumber])
						ifTrue: [out addFirst: 'http://' , server , fileName]]
					ifTrue: [^out]].
			((out size > 0) or: [char := doc reset; skipSeparators; next. (char == $*) | (char == $#)])
				ifTrue: [^out "we have our list"]]. "else got error msg instead of file"
		"Server was down, try next one"].
	self inform: 'All code update servers seem to be unavailable'.
	^out! !

!UpdateStreamDownloader class methodsFor: 'fetching updates' stamp: 'tpr 1/30/2019 17:49'!
retrieveUrls: urls ontoQueue: queue withWaitSema: waitSema
	"Download the given list of URLs. The queue will be loaded alternately with URLs and with the retrieved contents. If a download fails, the contents will be #failed. If all goes well, a special pair with an empty URL and the contents #finished will be put on the queue. waitSema is waited on every time before a new document is downloaded; this keeps the downloader from getting too far ahead of the main process."
	"kill the existing downloader if there is one"
	| updateCounter |
	UpdateDownloader ifNotNil: [UpdateDownloader terminate].
	updateCounter := 0.
	"fork a new downloading process"
	UpdateDownloader := [
		'Downloading updates' displayProgressFrom: 0 to: urls size during: [:bar |
			urls do: [:url | | front canPeek doc |
				waitSema wait.
				queue nextPut: url.
				doc := HTTPSocket httpGet: url.
				doc isString
					ifTrue: [queue nextPut: #failed.
						UpdateDownloader := nil.
						Processor activeProcess terminate]
					ifFalse: [canPeek := 120 min: doc size.
						front := doc next: canPeek.
						doc skip: -1 * canPeek.
						(front beginsWith: '