I am developing a macOS drawing app in Swift. The following reduced subset of my code reproduces my problem.
import Cocoa

class AppDelegate: NSObject, NSApplicationDelegate {
    func applicationDidFinishLaunching(_ aNotification: Notification) {
        let window = NSWindow(contentRect: NSMakeRect(0, 0, 1000, 1000),
                              styleMask: [.titled, .closable, .miniaturizable, .resizable],
                              backing: .buffered,
                              defer: false)
        window.makeKeyAndOrderFront(window)
        print("windowHeight = \(window.frame.height)")
        print("windowWidth = \(window.frame.width)")

        let view = Renderer(frame: NSMakeRect(0, 0, 1000, 1000))
        view.autoresizingMask = [.width, .height]
        window.contentView!.addSubview(view)
        print("viewHeight = \(view.frame.height)")
        print("viewWidth = \(view.frame.width)")
    }

    func applicationShouldTerminateAfterLastWindowClosed(_ sender: NSApplication) -> Bool { return true }

    func applicationWillTerminate(_ aNotification: Notification) { }
}

let appDelegate = AppDelegate()
let application = NSApplication.shared
application.setActivationPolicy(.regular)
application.delegate = appDelegate
application.activate(ignoringOtherApps: true)
application.run()

class Renderer: NSView {
    override func draw(_ rect: NSRect) {
        super.draw(rect)
        print("rectHeight = \(rect.height)")
        print("rectWidth = \(rect.width)")
        guard let gc = NSGraphicsContext.current?.cgContext else { return } // gc = graphics context
        print("gc.height = \(gc.height)")
        print("gc.width = \(gc.width)")
        // The rest of my drawing code goes here.
    }
}
When running this code, the printout is:
windowHeight = 1022.0
windowWidth = 1000.0
viewHeight = 1000.0
viewWidth = 1000.0
rectHeight = 1000.0
rectWidth = 1000.0
gc.height = 2000
gc.width = 2000
I declared the window, the view, and the rect to all be 1000 by 1000, so why does the current graphics context report a size of 2000 by 2000?
Because your computer has a high-resolution (Retina) display. The window, view, and rect dimensions are all measured in points, whereas CGContext.width and CGContext.height report the size of the context's backing bitmap in pixels. On a Retina screen the backing scale factor is 2.0, so your 1000 x 1000 point view is backed by a 2000 x 2000 pixel buffer. (For the same reason, the window is 1022 points tall: contentRect describes only the content area, and the title bar adds roughly 22 points on top of it.)
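If you ever need to reconcile the two units, you can ask the window for its scale factor or convert a rect into backing-store (pixel) coordinates. Here is a minimal sketch of how draw(_:) could report both; scale and pixelBounds are just illustrative local names:

    override func draw(_ rect: NSRect) {
        super.draw(rect)

        // Points-to-pixels ratio of the screen the window is on (2.0 on Retina).
        let scale = window?.backingScaleFactor ?? 1.0
        print("backingScaleFactor = \(scale)")

        // The view's bounds converted from points to backing-store pixels.
        let pixelBounds = convertToBacking(bounds)
        print("pixelBounds = \(pixelBounds)") // 2000 x 2000 on a 2x screen

        // Keep your drawing code in points; Quartz applies the scale factor
        // automatically, so the doubled gc.width/gc.height is harmless and
        // normally something you can ignore.
    }

Since AppKit performs the point-to-pixel mapping for you, drawing code written in points renders correctly on both standard and Retina displays without any changes.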